Example #1
    def test_simple_intersect(self):
        cube = iris.cube.Cube(np.array([[1, 2, 3, 4, 5],
                                        [2, 3, 4, 5, 6],
                                        [3, 4, 5, 6, 7],
                                        [4, 5, 6, 7, 8],
                                        [5, 6, 7, 8, 9]], dtype=np.int32))

        lonlat_cs = iris.coord_systems.RotatedGeogCS(10, 20)
        cube.add_dim_coord(iris.coords.DimCoord(np.arange(5, dtype=np.float32) * 90 - 180, 'longitude', units='degrees', coord_system=lonlat_cs), 1)
        cube.add_dim_coord(iris.coords.DimCoord(np.arange(5, dtype=np.float32) * 45 - 90, 'latitude', units='degrees', coord_system=lonlat_cs), 0)
        cube.add_aux_coord(iris.coords.DimCoord(points=np.int32(11), long_name='pressure', units='Pa'))
        cube.rename("temperature")
        cube.units = "K"

        cube2 = iris.cube.Cube(np.array([[1, 2, 3, 4, 5],
                                         [2, 3, 4, 5, 6],
                                         [3, 4, 5, 6, 7],
                                         [4, 5, 6, 7, 8],
                                         [5, 6, 7, 8, 50]], dtype=np.int32))

        lonlat_cs = iris.coord_systems.RotatedGeogCS(10, 20)
        cube2.add_dim_coord(iris.coords.DimCoord(np.arange(5, dtype=np.float32) * 90, 'longitude', units='degrees', coord_system=lonlat_cs), 1)
        cube2.add_dim_coord(iris.coords.DimCoord(np.arange(5, dtype=np.float32) * 45 - 90, 'latitude', units='degrees', coord_system=lonlat_cs), 0)
        cube2.add_aux_coord(iris.coords.DimCoord(points=np.int32(11), long_name='pressure', units='Pa'))
        cube2.rename("")

        r = iris.analysis.maths.intersection_of_cubes(cube, cube2)
        self.assertCML(r, ('cdm', 'test_simple_cube_intersection.cml'))
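For reference, a minimal sketch (not taken from any snippet on this page) of the basic Cube.rename() behaviour these examples rely on: the given name is stored as standard_name when it is a valid CF standard name, and falls back to long_name otherwise.

import numpy as np
import iris.cube

cube = iris.cube.Cube(np.zeros((2, 2)))
cube.rename("air_temperature")        # a valid CF standard name
print(cube.standard_name)             # -> 'air_temperature'
cube.rename("my arbitrary quantity")  # not a CF standard name
print(cube.long_name)                 # -> 'my arbitrary quantity'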
Example #2
def _make_cube(field, converter):
    # Convert the field to a Cube.
    (factories, references, standard_name, long_name, units, attributes,
     cell_methods, dim_coords_and_dims, aux_coords_and_dims) = converter(field)

    try:
        data = field._data
    except AttributeError:
        data = field.data

    cube = iris.cube.Cube(data,
                          attributes=attributes,
                          cell_methods=cell_methods,
                          dim_coords_and_dims=dim_coords_and_dims,
                          aux_coords_and_dims=aux_coords_and_dims)

    # Temporary code to deal with invalid standard names in the
    # translation table.
    if standard_name is not None:
        cube.rename(standard_name)
    if long_name is not None:
        cube.long_name = long_name
    if units is not None:
        # Temporary code to deal with invalid units in the translation
        # table.
        try:
            cube.units = units
        except ValueError:
            msg = 'Ignoring PP invalid units {!r}'.format(units)
            warnings.warn(msg)
            cube.attributes['invalid_units'] = units
            cube.units = iris.unit._UNKNOWN_UNIT_STRING

    return cube, factories, references
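The try/except around cube.units above is a defensive pattern: Iris (via cf_units, or iris.unit in older releases) raises ValueError for unit strings it cannot parse, so the loader records the offending string rather than failing. A minimal standalone sketch of the same idea, using the public string 'unknown' in place of the private _UNKNOWN_UNIT_STRING constant; the unit string itself is a made-up invalid value.

import warnings

import numpy as np
import iris.cube

cube = iris.cube.Cube(np.zeros(3))
bad_units = "not-a-real-unit !!"   # hypothetical unparseable unit string
try:
    cube.units = bad_units
except ValueError:
    warnings.warn("Ignoring invalid units {!r}".format(bad_units))
    cube.attributes["invalid_units"] = bad_units
    cube.units = "unknown"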
Example #3
def _sanitise_metadata(cube, unit):
    """
    As part of the maths metadata contract, clear the unnecessary or
    unsupported metadata from the resultant cube of the maths operation.

    """
    # Clear the cube names.
    cube.rename(None)

    # Clear the cube cell methods.
    cube.cell_methods = None

    # Clear the cell measures.
    for cm in cube.cell_measures():
        cube.remove_cell_measure(cm)

    # Clear the ancillary variables.
    for av in cube.ancillary_variables():
        cube.remove_ancillary_variable(av)

    # Clear the STASH attribute, if present.
    if "STASH" in cube.attributes:
        del cube.attributes["STASH"]

    # Set the cube units.
    cube.units = unit
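Note that cube.rename(None) above clears the cube's names entirely, so cube.name() falls back to the default 'unknown'. A minimal sketch of that effect, assuming a reasonably recent Iris:

import numpy as np
import iris.cube

cube = iris.cube.Cube(np.zeros(3), standard_name="air_temperature", units="K")
cube.rename(None)      # clears standard_name, long_name and var_name
print(cube.name())     # -> 'unknown'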
Example #4
def _make_cube(field, converter):
    # Convert the field to a Cube.
    metadata = converter(field)

    cube_data = field.core_data()
    cube = iris.cube.Cube(cube_data,
                          attributes=metadata.attributes,
                          cell_methods=metadata.cell_methods,
                          dim_coords_and_dims=metadata.dim_coords_and_dims,
                          aux_coords_and_dims=metadata.aux_coords_and_dims)

    # Temporary code to deal with invalid standard names in the
    # translation table.
    if metadata.standard_name is not None:
        cube.rename(metadata.standard_name)
    if metadata.long_name is not None:
        cube.long_name = metadata.long_name
    if metadata.units is not None:
        # Temporary code to deal with invalid units in the translation
        # table.
        try:
            cube.units = metadata.units
        except ValueError:
            msg = 'Ignoring PP invalid units {!r}'.format(metadata.units)
            warnings.warn(msg)
            cube.attributes['invalid_units'] = metadata.units
            cube.units = cf_units._UNKNOWN_UNIT_STRING

    return cube, metadata.factories, metadata.references
Example #5
    def _process_action_result(self, obj, cube):
        """Process the result of an action."""

        factory = None

        # NB. The names such as 'CoordAndDims' and 'CellMethod' are defined by
        # the "deferred import" performed by Rule.run_actions() above.
        if isinstance(obj, CoordAndDims):
            obj.add_coord(cube)

        #cell methods - not yet implemented
        elif isinstance(obj, CellMethod):
            cube.add_cell_method(obj)

        elif isinstance(obj, CMAttribute):
            # Temporary code to deal with invalid standard names from the translation table.
            # TODO: when name is "standard_name" force the value to be a real standard name
            if obj.name == 'standard_name' and obj.value is not None:
                cube.rename(obj.value)
            elif obj.name == 'units':
                # Graceful loading of units.
                try:
                    setattr(cube, obj.name, obj.value)
                except ValueError:
                    msg = 'Ignoring PP invalid units {!r}'.format(obj.value)
                    warnings.warn(msg)
                    cube.attributes['invalid_units'] = obj.value
                    cube.units = cf_units._UNKNOWN_UNIT_STRING
            else:
                setattr(cube, obj.name, obj.value)

        elif isinstance(obj, CMCustomAttribute):
            cube.attributes[obj.name] = obj.value

        elif isinstance(obj, Factory):
            factory = obj

        elif isinstance(obj, DebugString):
            print(obj)

        # The function returned nothing, like the pp save actions, "lbft = 3"
        elif obj is None:
            pass

        else:
            raise Exception(
                "Object could not be added to cube. Unknown type: " +
                obj.__class__.__name__)

        return factory
Example #6
    def _process_action_result(self, obj, cube):
        """Process the result of an action."""

        factory = None

        # NB. The names such as 'CoordAndDims' and 'CellMethod' are defined by
        # the "deferred import" performed by Rule.run_actions() above.
        if isinstance(obj, CoordAndDims):
            obj.add_coord(cube)

        #cell methods - not yet implemented
        elif isinstance(obj, CellMethod):
            cube.add_cell_method(obj)

        elif isinstance(obj, CMAttribute):
            # Temporary code to deal with invalid standard names from the translation table.
            # TODO: when name is "standard_name" force the value to be a real standard name
            if obj.name == 'standard_name' and obj.value is not None:
                cube.rename(obj.value)
            elif obj.name == 'units':
                # Graceful loading of units.
                try:
                    setattr(cube, obj.name, obj.value)
                except ValueError:
                    msg = 'Ignoring PP invalid units {!r}'.format(obj.value)
                    warnings.warn(msg)
                    cube.attributes['invalid_units'] = obj.value
                    cube.units = iris.unit._UNKNOWN_UNIT_STRING
            else:
                setattr(cube, obj.name, obj.value)

        elif isinstance(obj, CMCustomAttribute):
            cube.attributes[obj.name] = obj.value

        elif isinstance(obj, Factory):
            factory = obj

        elif isinstance(obj, DebugString):
            print(obj)

        # The function returned nothing, like the pp save actions, "lbft = 3"
        elif obj is None:
            pass

        else:
            raise Exception("Object could not be added to cube. Unknown type: " + obj.__class__.__name__)

        return factory
Example #7
    def _process_action_result(self, obj, cube):
        """Process the result of an action."""

        factory = None

        # NB. The names such as 'Coord' and 'CellMethod' are defined by
        # the "deferred import" performed by Rule.run_actions() above.
        if isinstance(obj, Coord):
            cube.add_coord(obj)

        elif isinstance(obj, CoordAndDims):
            obj.add_coord(cube)

        elif isinstance(obj, Factory):
            factory = obj

        #cell methods - not yet implemented
        elif isinstance(obj, CellMethod):
            cube.add_cell_method(obj)
            
        elif isinstance(obj, DebugString):
            print(obj)

        elif isinstance(obj, CMAttribute):
            # Temporary code to deal with invalid standard names from the translation table.
            # TODO: when name is "standard_name" force the value to be a real standard name
            if obj.name == 'standard_name' and obj.value is not None:
                cube.rename(obj.value)
            else:
                setattr(cube, obj.name, obj.value)
            
        elif isinstance(obj, CMCustomAttribute):
            cube.attributes[obj.name] = obj.value

        # The function returned nothing, like the pp save actions, "lbft = 3"
        elif obj is None:
            pass
        
        else:
            raise Exception("Object could not be added to cube. Unknown type: " + obj.__class__.__name__)

        return factory
Example #8
def calculate(name, cubelist):
    """Calculates any variable available from the cubelist

    Args:
        name (string): The CF standard name of the variable to be calculated

        cubelist (iris.cube.CubeList): A cubelist containing either the
            requested variable or the variables required to calculate the
            requested variable

    Returns:
        iris.cube.Cube: The variable requested

    Raises:
        ValueError: If the requested variable is not available or there are
            multiple matching cubes in the input `cubelist`.
    """
    # If the cube is in the cubelist simply extract and return it
    newcubelist = cubelist.extract(name)
    if len(newcubelist) == 1:
        return newcubelist[0].copy()

    elif len(newcubelist) > 1:
        raise ValueError('Multiple cubes found matching ' + name +
                         ' not sure which to use')

    # If an equation is present for the requested variable, then try to derive
    # the variable from existing cubes in the input cubelist
    elif name in available:
        # Calculate all required variables
        args = [calc(var, cubelist) for var in available[name]['required']]
        # Call the function to calculate the requested variable
        cube = available[name]['function'](*args)
        cube.rename(name)
        return cube
    else:
        raise ValueError('Can not get ' + name + ' from cubelist.')
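A hypothetical usage sketch for calculate(): the file name, the requested variable and the contents of the module-level 'available' table are assumptions, not part of the snippet above, and calculate is assumed to be in scope (same module or imported).

import iris

# Hypothetical input file, for illustration only.
cubelist = iris.load("model_output.nc")
theta = calculate("air_potential_temperature", cubelist)
print(theta.name())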
Example #9
def curl(i_cube, j_cube, k_cube=None, ignore=None):
    r'''
    Calculate the 3d curl of the given vector of cubes.

    Args:

    * i_cube
        The i cube of the vector to operate on
    * j_cube
        The j cube of the vector to operate on

    Kwargs:

    * k_cube
        The k cube of the vector to operate on

    Return (i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube)

    The calculation of curl is dependent on the type of
    :class:`iris.coord_systems.CoordSystem` in the cube:

        Cartesian curl

            The Cartesian curl is defined as:

            .. math::

                \nabla\times \vec u =
                (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z})\vec a_i
                -
                (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j
                +
                (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k

        Spherical curl

            When spherical calculus is used, i_cube is the phi vector
            component (e.g. eastward), j_cube is the theta component
            (e.g. northward) and k_cube is the radial component.

            The spherical curl is defined as:

            .. math::

                \nabla\times \vec A = \frac{1}{r cos \theta}
                (\frac{\delta}{\delta \theta}
                (\vec A_\phi cos \theta) -
                \frac{\delta \vec A_\theta}{\delta \phi}) \vec r +
                \frac{1}{r}(\frac{1}{cos \theta}
                \frac{\delta \vec A_r}{\delta \phi} -
                \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta +
                \frac{1}{r}
                (\frac{\delta}{\delta r}(r \vec A_\theta) -
                \frac{\delta \vec A_r}{\delta \theta}) \vec \phi

            where phi is longitude, theta is latitude.

    '''
    if ignore is not None:
        ignore = None
        warnings.warn('The ignore keyword to iris.analysis.calculus.curl '
                      'is deprecated, ignoring is now done automatically.')

    # Get the vector quantity names.
    # (i.e. ['easterly', 'northerly', 'vertical'])
    vector_quantity_names, phenomenon_name = \
        spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube)

    cubes = filter(None, [i_cube, j_cube, k_cube])

    # get the names of all coords binned into useful comparison groups
    coord_comparison = iris.analysis.coord_comparison(*cubes)

    bad_coords = coord_comparison['ungroupable_and_dimensioned']
    if bad_coords:
        raise ValueError("Coordinates found in one cube that describe "
                         "a data dimension which weren't in the other "
                         "cube ({}), try removing this coordinate.".format(
                             ', '.join(group.name() for group in bad_coords)))

    bad_coords = coord_comparison['resamplable']
    if bad_coords:
        raise ValueError('Some coordinates are different ({}), consider '
                         'resampling.'.format(
                             ', '.join(group.name() for group in bad_coords)))

    ignore_string = ''
    if coord_comparison['ignorable']:
        ignore_string = ' (ignoring {})'.format(
            ', '.join(group.name() for group in bad_coords))

    # Get the dim_coord, or None if none exist, for the xyz dimensions
    x_coord = i_cube.coord(axis='X')
    y_coord = i_cube.coord(axis='Y')
    z_coord = i_cube.coord(axis='Z')

    y_dim = i_cube.coord_dims(y_coord)[0]

    horiz_cs = i_cube.coord_system('CoordSystem')

    # Planar (non spherical) coords?
    ellipsoidal = isinstance(horiz_cs, (iris.coord_systems.GeogCS,
                                        iris.coord_systems.RotatedGeogCS))
    if not ellipsoidal:

        # TODO Implement some mechanism for conforming to a common grid
        dj_dx = _curl_differentiate(j_cube, x_coord)
        prototype_diff = dj_dx

        # i curl component (dk_dy - dj_dz)
        dk_dy = _curl_differentiate(k_cube, y_coord)
        dk_dy = _curl_regrid(dk_dy, prototype_diff)
        dj_dz = _curl_differentiate(j_cube, z_coord)
        dj_dz = _curl_regrid(dj_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding
        # does not support).
        if dj_dz is not None and dj_dz.data.shape != prototype_diff.data.shape:
            dj_dz = _curl_change_z(dj_dz, z_coord, prototype_diff)

        i_cmpt = _curl_subtract(dk_dy, dj_dz)
        dj_dz = dk_dy = None

        # j curl component (di_dz - dk_dx)
        di_dz = _curl_differentiate(i_cube, z_coord)
        di_dz = _curl_regrid(di_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding
        # does not support).
        if di_dz is not None and di_dz.data.shape != prototype_diff.data.shape:
            di_dz = _curl_change_z(di_dz, z_coord, prototype_diff)

        dk_dx = _curl_differentiate(k_cube, x_coord)
        dk_dx = _curl_regrid(dk_dx, prototype_diff)
        j_cmpt = _curl_subtract(di_dz, dk_dx)
        di_dz = dk_dx = None

        # k curl component ( dj_dx - di_dy)
        di_dy = _curl_differentiate(i_cube, y_coord)
        di_dy = _curl_regrid(di_dy, prototype_diff)
        # Since prototype_diff == dj_dx we don't need to recalculate dj_dx
#        dj_dx = _curl_differentiate(j_cube, x_coord)
#        dj_dx = _curl_regrid(dj_dx, prototype_diff)
        k_cmpt = _curl_subtract(dj_dx, di_dy)
        di_dy = dj_dx = None

        result = [i_cmpt, j_cmpt, k_cmpt]

    # Spherical coords (GeogCS or RotatedGeogCS).
    else:
        # A_\phi = i ; A_\theta = j ; A_\r = k
        # theta = lat ; phi = long ;
        # r_cmpt = 1 / (r * cos(lat)) *
        #    (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi)
        # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
        # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube)
        if y_coord.name() != 'latitude' or x_coord.name() != 'longitude':
            raise ValueError('Expecting latitude as the y coord and '
                             'longitude as the x coord for spherical curl.')

        # Get the radius of the earth - and check for sphericity
        ellipsoid = horiz_cs
        if isinstance(horiz_cs, iris.coord_systems.RotatedGeogCS):
            ellipsoid = horiz_cs.ellipsoid
        if ellipsoid:
            # TODO: Add a test for this
            r = ellipsoid.semi_major_axis
            r_unit = iris.unit.Unit("m")
            spherical = (ellipsoid.inverse_flattening == 0.0)
        else:
            r = DEFAULT_SPHERICAL_EARTH_RADIUS
            r_unit = DEFAULT_SPHERICAL_EARTH_RADIUS_UNIT
            spherical = True

        if not spherical:
            raise ValueError('Cannot take the curl over a non-spherical '
                             'ellipsoid.')

        lon_coord = x_coord.copy()
        lat_coord = y_coord.copy()
        lon_coord.convert_units('radians')
        lat_coord.convert_units('radians')
        lat_cos_coord = _coord_cos(lat_coord)

        # TODO Implement some mechanism for conforming to a common grid
        temp = iris.analysis.maths.multiply(i_cube, lat_cos_coord, y_dim)
        dicos_dtheta = _curl_differentiate(temp, lat_coord)
        prototype_diff = dicos_dtheta

        # r curl component: 1 / (r * cos(lat)) * (dicos_dtheta - d_j_cube_dphi)
        # Since prototype_diff == dicos_dtheta we don't need to
        # recalculate dicos_dtheta.
        d_j_cube_dphi = _curl_differentiate(j_cube, lon_coord)
        d_j_cube_dphi = _curl_regrid(d_j_cube_dphi, prototype_diff)
        new_lat_coord = d_j_cube_dphi.coord('latitude')
        new_lat_cos_coord = _coord_cos(new_lat_coord)
        lat_dim = d_j_cube_dphi.coord_dims(new_lat_coord)[0]
        r_cmpt = iris.analysis.maths.divide(_curl_subtract(dicos_dtheta,
                                                           d_j_cube_dphi),
                                            r * new_lat_cos_coord, dim=lat_dim)
        r_cmpt.units = r_cmpt.units / r_unit
        d_j_cube_dphi = dicos_dtheta = None

        # phi curl component: 1/r * ( drj_dr - d_k_cube_dtheta)
        drj_dr = _curl_differentiate(r * j_cube, z_coord)
        if drj_dr is not None:
            drj_dr.units = drj_dr.units * r_unit
        drj_dr = _curl_regrid(drj_dr, prototype_diff)
        d_k_cube_dtheta = _curl_differentiate(k_cube, lat_coord)
        d_k_cube_dtheta = _curl_regrid(d_k_cube_dtheta, prototype_diff)
        if drj_dr is None and d_k_cube_dtheta is None:
            phi_cmpt = None
        else:
            phi_cmpt = 1/r * _curl_subtract(drj_dr, d_k_cube_dtheta)
            phi_cmpt.units = phi_cmpt.units / r_unit

        drj_dr = d_k_cube_dtheta = None

        # theta curl component: 1/r * ( 1/cos(lat) * d_k_cube_dphi - dri_dr )
        d_k_cube_dphi = _curl_differentiate(k_cube, lon_coord)
        d_k_cube_dphi = _curl_regrid(d_k_cube_dphi, prototype_diff)
        if d_k_cube_dphi is not None:
            d_k_cube_dphi = iris.analysis.maths.divide(d_k_cube_dphi,
                                                       lat_cos_coord)
        dri_dr = _curl_differentiate(r * i_cube, z_coord)
        if dri_dr is not None:
            dri_dr.units = dri_dr.units * r_unit
        dri_dr = _curl_regrid(dri_dr, prototype_diff)
        if d_k_cube_dphi is None and dri_dr is None:
            theta_cmpt = None
        else:
            theta_cmpt = 1/r * _curl_subtract(d_k_cube_dphi, dri_dr)
            theta_cmpt.units = theta_cmpt.units / r_unit
        d_k_cube_dphi = dri_dr = None

        result = [phi_cmpt, theta_cmpt, r_cmpt]

    for direction, cube in zip(vector_quantity_names, result):
        if cube is not None:
            cube.rename('%s curl of %s' % (direction, phenomenon_name))

    return result
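A hypothetical usage sketch for curl(): it assumes three 3-D wind component cubes sharing X, Y and Z coordinates; the file and variable names are made up for illustration only.

import iris

u = iris.load_cube("winds.nc", "x_wind")
v = iris.load_cube("winds.nc", "y_wind")
w = iris.load_cube("winds.nc", "upward_air_velocity")

i_curl, j_curl, k_curl = curl(u, v, w)
for component in (i_curl, j_curl, k_curl):
    if component is not None:
        print(component.name())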
Example #10
def load_NAMEIII_trajectory(filename):
    """
    Load a NAME III trajectory file returning a
    generator of :class:`iris.cube.Cube` instances.

    Args:

    * filename (string):
        Name of file to load.

    Returns:
        A generator of :class:`iris.cube.Cube` instances.

    """
    time_unit = cf_units.Unit('hours since epoch',
                              calendar=cf_units.CALENDAR_GREGORIAN)

    with open(filename, 'r') as infile:
        header = read_header(infile)

        # read the column headings
        for line in infile:
            if line.startswith("    "):
                break
        headings = [heading.strip() for heading in line.split(",")]

        # read the columns
        columns = [[] for i in range(len(headings))]
        for line in infile:
            values = [v.strip() for v in line.split(",")]
            for c, v in enumerate(values):
                if "UTC" in v:
                    v = v.replace(":00 ", " ")  # Strip out milliseconds.
                    v = datetime.datetime.strptime(v, NAMEIII_DATETIME_FORMAT)
                else:
                    try:
                        v = float(v)
                    except ValueError:
                        pass
                columns[c].append(v)

    # Where's the Z column?
    z_column = None
    for i, heading in enumerate(headings):
        if heading.startswith("Z "):
            z_column = i
            break
    if z_column is None:
        raise TranslationError("Expected a Z column")

    # Every column up to Z becomes a coordinate.
    coords = []
    for name, values in zip(headings[:z_column+1], columns[:z_column+1]):
        values = np.array(values)
        if np.all(np.array(values) == values[0]):
            values = [values[0]]

        standard_name = long_name = units = None
        if isinstance(values[0], datetime.datetime):
            values = time_unit.date2num(values)
            units = time_unit
            if name == "Time":
                name = "time"
        elif " (Lat-Long)" in name:
            if name.startswith("X"):
                name = "longitude"
            elif name.startswith("Y"):
                name = "latitude"
            units = "degrees"
        elif name == "Z (m asl)":
            name = "altitude"
            units = "m"
            long_name = "altitude above sea level"
        elif name == "Z (m agl)":
            name = 'height'
            units = "m"
            long_name = "height above ground level"
        elif name == "Z (FL)":
            name = "flight_level"
            long_name = name

        try:
            coord = DimCoord(values, units=units)
        except ValueError:
            coord = AuxCoord(values, units=units)
        coord.rename(name)
        if coord.long_name is None and long_name is not None:
            coord.long_name = long_name
        coords.append(coord)

    # Every numerical column after the Z becomes a cube.
    for name, values in zip(headings[z_column+1:], columns[z_column+1:]):
        try:
            float(values[0])
        except ValueError:
            continue
        # units embedded in column heading?
        name, units = _split_name_and_units(name)
        cube = iris.cube.Cube(values, units=units)
        cube.rename(name)
        for coord in coords:
            dim = 0 if len(coord.points) > 1 else None
            if isinstance(coord, DimCoord) and coord.name() == "time":
                cube.add_dim_coord(coord.copy(), dim)
            else:
                cube.add_aux_coord(coord.copy(), dim)
        yield cube
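A hypothetical usage sketch for load_NAMEIII_trajectory(); the trajectory file name is an assumption. Each yielded cube carries copies of the shared trajectory coordinates built above.

for cube in load_NAMEIII_trajectory("Data_Traj_C1_20180101.txt"):
    print(cube.name(), cube.units)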
Example #11
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in six.iteritems(column_headings)}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = '{} {}'.format(field_headings['Species'],
                              field_headings['Quantity'])
        name = name.upper().replace(' ', '_')
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings['Unit'])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        z_coord = _cf_height_from_name(field_headings['Z'])
        cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit(
            'hours since epoch', calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == 'latitude' or coord.name == 'longitude':
                coord_units = 'degrees'
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == 'time':
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                if coord.name == 'longitude':
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                icoord = DimCoord(points=pts,
                                  standard_name=coord.name,
                                  units=coord_units,
                                  coord_system=coord_sys,
                                  circular=circular)
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = ['X', 'Y', 'Z', 'Time', 'Unit', 'Av or Int period',
                    'X grid origin', 'Y grid origin',
                    'X grid size', 'Y grid size',
                    'X grid resolution', 'Y grid resolution', ]

        # Add the Main Headings as attributes.
        for key, value in six.iteritems(header):
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in six.iteritems(field_headings):
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
Example #12
def _generate_cubes(header,
                    column_headings,
                    coords,
                    data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in column_headings.items()}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = "{} {}".format(field_headings["Species"],
                              field_headings["Quantity"])
        name = name.upper().replace(" ", "_")
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings["Units"])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if "Z" in field_headings:
            upper_bound = field_headings.get("... to [Z]")
            lower_bound = field_headings.get("... from [Z]")
            z_coord = _cf_height_from_name(
                field_headings["Z"],
                upper_bound=upper_bound,
                lower_bound=lower_bound,
            )
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit("hours since epoch",
                                  calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == "latitude" or coord.name == "longitude":
                coord_units = "degrees"
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if (coord.name == "projection_x_coordinate"
                    or coord.name == "projection_y_coordinate"):
                coord_units = "m"
                coord_sys = iris.coord_systems.OSGB()
            if coord.name == "height":
                coord_units = "m"
                long_name = "height above ground level"
                pts = coord.values
            if coord.name == "altitude":
                coord_units = "m"
                long_name = "altitude above sea level"
                pts = coord.values
            if coord.name == "air_pressure":
                coord_units = "Pa"
                pts = coord.values
            if coord.name == "flight_level":
                pts = coord.values
                long_name = "flight_level"
                coord_units = _parse_units("FL")
            if coord.name == "time":
                coord_units = time_unit
                pts = time_unit.date2num(coord.values).astype(float)

            if coord.dimension is not None:
                if coord.name == "longitude":
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == "flight_level":
                    icoord = DimCoord(points=pts,
                                      units=coord_units,
                                      long_name=long_name)
                else:
                    icoord = DimCoord(
                        points=pts,
                        standard_name=coord.name,
                        units=coord_units,
                        coord_system=coord_sys,
                        circular=circular,
                    )
                if coord.name == "height" or coord.name == "altitude":
                    icoord.long_name = long_name
                if (coord.name == "time"
                        and "Av or Int period" in field_headings):
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds.astype(float)
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                icoord = AuxCoord(
                    points=pts[i],
                    standard_name=coord.name,
                    coord_system=coord_sys,
                    units=coord_units,
                )
                if (coord.name == "time"
                        and "Av or Int period" in field_headings):
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :].astype(float)
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            "X",
            "Y",
            "Z",
            "Time",
            "T",
            "Units",
            "Av or Int period",
            "... from [Z]",
            "... to [Z]",
            "X grid origin",
            "Y grid origin",
            "X grid size",
            "Y grid size",
            "X grid resolution",
            "Y grid resolution",
            "Number of field cols",
            "Number of preliminary cols",
            "Number of fields",
            "Number of series",
            "Output format",
        ]

        # Add the Main Headings as attributes.
        for key, value in header.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in field_headings.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
Example #13
def load_NAMEIII_trajectory(filename):
    """
    Load a NAME III trajectory file returning a
    generator of :class:`iris.cube.Cube` instances.

    Args:

    * filename (string):
        Name of file to load.

    Returns:
        A generator of :class:`iris.cube.Cube` instances.

    """
    time_unit = cf_units.Unit("hours since epoch",
                              calendar=cf_units.CALENDAR_GREGORIAN)

    with open(filename, "r") as infile:
        header = read_header(infile)

        # read the column headings
        for line in infile:
            if line.startswith("    "):
                break
        headings = [heading.strip() for heading in line.split(",")]

        # read the columns
        columns = [[] for i in range(len(headings))]
        for line in infile:
            values = [v.strip() for v in line.split(",")]
            for c, v in enumerate(values):
                if "UTC" in v:
                    v = datetime.datetime.strptime(v, NAMETRAJ_DATETIME_FORMAT)
                else:
                    try:
                        v = float(v)
                    except ValueError:
                        pass
                columns[c].append(v)

    # Sort columns according to PP Index
    columns_t = list(map(list, zip(*columns)))
    columns_t.sort(key=itemgetter(1))
    columns = list(map(list, zip(*columns_t)))

    # Where's the Z column?
    z_column = None
    for i, heading in enumerate(headings):
        if heading.startswith("Z "):
            z_column = i
            break
    if z_column is None:
        raise TranslationError("Expected a Z column")

    # Every column up to Z becomes a coordinate.
    coords = []
    for name, values in zip(headings[:z_column + 1], columns[:z_column + 1]):
        values = np.array(values)
        if np.all(np.array(values) == values[0]):
            values = [values[0]]

        long_name = units = None
        if isinstance(values[0], datetime.datetime):
            values = time_unit.date2num(values).astype(float)
            units = time_unit
            if name == "Time":
                name = "time"
        elif " (Lat-Long)" in name:
            if name.startswith("X"):
                name = "longitude"
            elif name.startswith("Y"):
                name = "latitude"
            units = "degrees"
        elif name == "Z (m asl)":
            name = "altitude"
            units = "m"
            long_name = "altitude above sea level"
        elif name == "Z (m agl)":
            name = "height"
            units = "m"
            long_name = "height above ground level"
        elif name == "Z (FL)":
            name = "flight_level"
            long_name = name

        try:
            coord = DimCoord(values, units=units)
        except ValueError:
            coord = AuxCoord(values, units=units)
        coord.rename(name)
        if coord.long_name is None and long_name is not None:
            coord.long_name = long_name
        coords.append(coord)

    # Every numerical column after the Z becomes a cube.
    for name, values in zip(headings[z_column + 1:], columns[z_column + 1:]):
        try:
            float(values[0])
        except ValueError:
            continue
        # units embedded in column heading?
        name, units = _split_name_and_units(name)
        cube = iris.cube.Cube(values, units=units)
        cube.rename(name)
        # Add the Main Headings as attributes.
        for key, value in header.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value
        # Add coordinates
        for coord in coords:
            dim = 0 if len(coord.points) > 1 else None
            if dim == 0 and coord.name() == "time":
                cube.add_dim_coord(coord.copy(), dim)
            elif dim == 0 and coord.name() == "PP Index":
                cube.add_dim_coord(coord.copy(), dim)
            else:
                cube.add_aux_coord(coord.copy(), dim)
        yield cube
Example #14
def curl(i_cube, j_cube, k_cube=None, ignore=None, update_history=True):
    r'''
    Calculate the 3d curl of the given vector of cubes.

    Args:
    
    * i_cube
        The i cube of the vector to operate on
    * j_cube
        The j cube of the vector to operate on
        
    Kwargs:
    
    * k_cube
        The k cube of the vector to operate on        

    Return (i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube)
    
    The calculation of curl is dependent on the type of :class:`iris.coord_systems.HorizontalCS` in the cube:
    
        Cartesian curl
        
            The Cartesian curl is defined as:
        
            .. math::
            
                \nabla\times \vec u = (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z}) \vec a_i - (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j + (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k
        
        Spherical curl
            
            When spherical calculus is used, i_cube is the phi vector component (e.g. eastward), j_cube is the theta component 
            (e.g. northward) and k_cube is the radial component.
    
            The spherical curl is defined as:
        
            .. math::
                
                \nabla\times \vec A = \frac{1}{r cos \theta}(\frac{\delta}{\delta \theta}(\vec A_\phi cos \theta) - \frac{\delta \vec A_\theta}{\delta \phi}) \vec r + \frac{1}{r}(\frac{1}{cos \theta} \frac{\delta \vec A_r}{\delta \phi} - \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta + \frac{1}{r}(\frac{\delta}{\delta r}(r \vec A_\theta) - \frac{\delta \vec A_r}{\delta \theta}) \vec \phi
    
            where phi is longitude, theta is latitude.

    '''
    if ignore is not None:
        ignore = None
        warnings.warn('The ignore keyword to iris.analysis.calculus.curl is deprecated, ignoring is now done automatically.')
    
    # get the radius of the earth
    latlon_cs = i_cube.coord_system(iris.coord_systems.LatLonCS)
    if latlon_cs and latlon_cs.datum.is_spherical():
        r = latlon_cs.datum.semi_major_axis
        r_unit = latlon_cs.datum.units
    else:
        r = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS
        r_unit = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS_UNIT


    # Get the vector quantity names (i.e. ['easterly', 'northerly', 'vertical'])
    vector_quantity_names, phenomenon_name = spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube)
    
    cubes = filter(None, [i_cube, j_cube, k_cube])
    
    # get the names of all coords binned into useful comparison groups
    coord_comparison = iris.analysis.coord_comparison(*cubes)
    
    bad_coords = coord_comparison['ungroupable_and_dimensioned']
    if bad_coords:
        raise ValueError("Coordinates found in one cube that describe a data dimension which weren't in the other "
                         "cube (%s), try removing this coordinate."  % ', '.join([group.name() for group in bad_coords]))
    
    bad_coords = coord_comparison['resamplable']
    if bad_coords:
        raise ValueError('Some coordinates are different (%s), consider resampling.' % ', '.join([group.name() for group in bad_coords]))
    
    ignore_string = ''
    if coord_comparison['ignorable']:
        ignore_string = ' (ignoring %s)' % ', '.join([group.name() for group in bad_coords])

    # Get the dim_coord, or None if none exist, for the xyz dimensions
    x_coord = i_cube.coord(axis='X') 
    y_coord = i_cube.coord(axis='Y')
    z_coord = i_cube.coord(axis='Z')
    
    y_dim = i_cube.coord_dims(y_coord)[0]
   
    horiz_cs = i_cube.coord_system('HorizontalCS')
    if horiz_cs is None:
        raise ValueError('Could not get the horizontal CS of the cubes provided.')
        
    if horiz_cs.cs_type == iris.coord_systems.CARTESIAN_CS:
        
        # TODO Implement some mechanism for conforming to a common grid
        dj_dx = _curl_differentiate(j_cube, x_coord)
        prototype_diff = dj_dx
                
        # i curl component (dk_dy - dj_dz)
        dk_dy = _curl_differentiate(k_cube, y_coord)
        dk_dy = _curl_regrid(dk_dy, prototype_diff)
        dj_dz = _curl_differentiate(j_cube, z_coord)
        dj_dz = _curl_regrid(dj_dz, prototype_diff)
        
        # TODO Implement resampling in the vertical (which regridding does not support).
        if dj_dz is not None and dj_dz.data.shape != prototype_diff.data.shape:
            dj_dz = _curl_change_z(dj_dz, z_coord, prototype_diff)

        i_cmpt = _curl_subtract(dk_dy, dj_dz)
        dj_dz = dk_dy = None
        
        # j curl component (di_dz - dk_dx)
        di_dz = _curl_differentiate(i_cube, z_coord)
        di_dz = _curl_regrid(di_dz, prototype_diff)
        
        # TODO Implement resampling in the vertical (which regridding does not support).
        if di_dz is not None and di_dz.data.shape != prototype_diff.data.shape:
            di_dz = _curl_change_z(di_dz, z_coord, prototype_diff)

        dk_dx = _curl_differentiate(k_cube, x_coord)
        dk_dx = _curl_regrid(dk_dx, prototype_diff)
        j_cmpt = _curl_subtract(di_dz, dk_dx)
        di_dz = dk_dx = None
        
        # k curl component ( dj_dx - di_dy)
        di_dy = _curl_differentiate(i_cube, y_coord)
        di_dy = _curl_regrid(di_dy, prototype_diff)
        # Since prototype_diff == dj_dx we don't need to recalculate dj_dx
#        dj_dx = _curl_differentiate(j_cube, x_coord)
#        dj_dx = _curl_regrid(dj_dx, prototype_diff)
        k_cmpt = _curl_subtract(dj_dx, di_dy)
        di_dy = dj_dx = None
        
        result = [i_cmpt, j_cmpt, k_cmpt]
    
    elif horiz_cs.cs_type == iris.coord_systems.SPHERICAL_CS:
        # A_\phi = i ; A_\theta = j ; A_\r = k
        # theta = lat ; phi = long ;
        # r_cmpt = 1/ ( r * cos(lat) ) * ( d/dtheta ( i_cube * sin( lat ) ) - d_j_cube_dphi )
        # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
        # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube)
        if not horiz_cs.datum.is_spherical():
            raise NotImplementedError('Cannot take the curl over a non-spherical datum.')
        
        if y_coord.name() != 'latitude' or x_coord.name() != 'longitude':
            raise ValueError('Expecting latitude as the y coord and longitude as the x coord for spherical curl.')
        
        lon_coord = x_coord.unit_converted('radians')
        lat_coord = y_coord.unit_converted('radians')
        lat_cos_coord = _coord_cos(lat_coord)

        # TODO Implement some mechanism for conforming to a common grid
        temp = iris.analysis.maths.multiply(i_cube, lat_cos_coord, y_dim)
        dicos_dtheta = _curl_differentiate(temp, lat_coord)
        prototype_diff = dicos_dtheta
        
        # r curl component:  1/ ( r * cos(lat) ) * ( dicos_dtheta - d_j_cube_dphi )
        # Since prototype_diff == dicos_dtheta we don't need to recalculate dicos_dtheta
        d_j_cube_dphi = _curl_differentiate(j_cube, lon_coord)
        d_j_cube_dphi = _curl_regrid(d_j_cube_dphi, prototype_diff)
        new_lat_coord = d_j_cube_dphi.coord(name='latitude')
        new_lat_cos_coord = _coord_cos(new_lat_coord)
        lat_dim = d_j_cube_dphi.coord_dims(new_lat_coord)[0]
        r_cmpt = iris.analysis.maths.divide(_curl_subtract(dicos_dtheta, d_j_cube_dphi), r * new_lat_cos_coord, dim=lat_dim)
        r_cmpt.units = r_cmpt.units / r_unit
        d_j_cube_dphi = dicos_dtheta = None
        
        # phi curl component: 1/r * ( drj_dr - d_k_cube_dtheta)
        drj_dr = _curl_differentiate(r * j_cube, z_coord)
        if drj_dr is not None:
            drj_dr.units = drj_dr.units * r_unit
        drj_dr = _curl_regrid(drj_dr, prototype_diff)
        d_k_cube_dtheta = _curl_differentiate(k_cube, lat_coord)
        d_k_cube_dtheta = _curl_regrid(d_k_cube_dtheta, prototype_diff)
        if drj_dr is None and d_k_cube_dtheta is None:
            phi_cmpt = None
        else:
            phi_cmpt = 1/r * _curl_subtract(drj_dr, d_k_cube_dtheta)
            phi_cmpt.units = phi_cmpt.units / r_unit
            
        drj_dr = d_k_cube_dtheta = None
        
        # theta curl component: 1/r * ( 1/cos(lat) * d_k_cube_dphi - dri_dr )
        d_k_cube_dphi = _curl_differentiate(k_cube, lon_coord)
        d_k_cube_dphi = _curl_regrid(d_k_cube_dphi, prototype_diff)
        if d_k_cube_dphi is not None:
            d_k_cube_dphi = iris.analysis.maths.divide(d_k_cube_dphi, lat_cos_coord)
        dri_dr = _curl_differentiate(r * i_cube, z_coord)
        if dri_dr is not None:
            dri_dr.units = dri_dr.units * r_unit
        dri_dr = _curl_regrid(dri_dr, prototype_diff)
        if d_k_cube_dphi is None and dri_dr is None:
            theta_cmpt = None
        else:
            theta_cmpt = 1/r * _curl_subtract(d_k_cube_dphi, dri_dr)
            theta_cmpt.units = theta_cmpt.units / r_unit
        d_k_cube_dphi = dri_dr = None
        
        result = [phi_cmpt, theta_cmpt, r_cmpt]
    
    else:
        raise ValueError("Horizontal coord system neither cartesian nor spherical spheroid: %s %s (%s)" \
                         % (type(horiz_cs), horiz_cs.cs_type, horiz_cs.datum))
    
    for direction, cube in zip(vector_quantity_names, result):
        if cube is not None:
            cube.rename('%s curl of %s' % (direction, phenomenon_name))
        
            if update_history:
                # Add history in place
                if k_cube is None:
                    cube.add_history('%s cmpt of the curl of %s and %s%s' % \
                                     (direction, i_cube.name(), j_cube.name(), ignore_string))
                else:
                    cube.add_history('%s cmpt of the curl of %s, %s and %s%s' % \
                                     (direction, i_cube.name(), j_cube.name(), k_cube.name(), ignore_string))
        
    return result
Example #15
def load_NAMEIII_trajectory(filename):
    """
    Load a NAME III trajectory file returning a
    generator of :class:`iris.cube.Cube` instances.

    Args:

    * filename (string):
        Name of file to load.

    Returns:
        A generator of :class:`iris.cube.Cube` instances.

    """
    time_unit = iris.unit.Unit('hours since epoch',
                               calendar=iris.unit.CALENDAR_GREGORIAN)

    with open(filename, 'r') as infile:
        header = read_header(infile)

        # read the column headings
        for line in infile:
            if line.startswith("    "):
                break
        headings = [heading.strip() for heading in line.split(",")]

        # read the columns
        columns = [[] for i in range(len(headings))]
        for line in infile:
            values = [v.strip() for v in line.split(",")]
            for c, v in enumerate(values):
                if "UTC" in v:
                    v = v.replace(":00 ", " ")  # Strip out milliseconds.
                    v = datetime.datetime.strptime(v, NAMEIII_DATETIME_FORMAT)
                else:
                    try:
                        v = float(v)
                    except ValueError:
                        pass
                columns[c].append(v)

    # Where's the Z column?
    z_column = None
    for i, heading in enumerate(headings):
        if heading.startswith("Z "):
            z_column = i
            break
    if z_column is None:
        raise iris.exceptions.TranslationError("Expected a Z column")

    # Every column up to Z becomes a coordinate.
    coords = []
    for name, values in izip(headings[:z_column + 1], columns[:z_column + 1]):
        values = np.array(values)
        if np.all(np.array(values) == values[0]):
            values = [values[0]]

        standard_name = long_name = units = None
        if isinstance(values[0], datetime.datetime):
            values = time_unit.date2num(values)
            units = time_unit
            if name == "Time":
                name = "time"
        elif " (Lat-Long)" in name:
            if name.startswith("X"):
                name = "longitude"
            elif name.startswith("Y"):
                name = "latitude"
            units = "degrees"
        elif name == "Z (m asl)":
            name = "height"
            units = "m"

        try:
            coord = DimCoord(values, units=units)
        except ValueError:
            coord = AuxCoord(values, units=units)
        coord.rename(name)
        coords.append(coord)

    # Every numerical column after the Z becomes a cube.
    for name, values in izip(headings[z_column + 1:], columns[z_column + 1:]):
        try:
            float(values[0])
        except ValueError:
            continue
        # units embedded in column heading?
        name, units = _split_name_and_units(name)
        cube = iris.cube.Cube(values, units=units)
        cube.rename(name)
        for coord in coords:
            dim = 0 if len(coord.points) > 1 else None
            if isinstance(coord, DimCoord) and coord.name() == "time":
                cube.add_dim_coord(coord.copy(), dim)
            else:
                cube.add_aux_coord(coord.copy(), dim)
        yield cube
Example #16
    def test_handmade(self):
        # Test xml output of a handmade cube.        
        data = numpy.array( [ [1, 2, 3, 4, 5], 
                              [2, 3, 4, 5, 6],
                              [3, 4, 5, 6, 7],
                              [4, 5, 6, 7, 8],
                              [5, 6, 7, 8, 9] ], dtype=numpy.int32)
        cubes = []

        # Different types of test
        for ll_dtype in [numpy.float32, numpy.int32]:
            for rotated in [False, True]:
                for forecast_or_time_mean in ["forecast", "time_mean"]:
                    for TEST_COMPAT_i in xrange(2): # TODO: remove with TEST_COMPAT purge - 
                                                    # adds two copies of each cube to cube list
                                                    # in line with redundant data first option
                        cube = iris.cube.Cube(data)

                        cube.attributes['my_attribute'] = 'foobar'
                        
                        if rotated == False:
                            pole_pos = coord_systems.GeoPosition(90, 0)
                        else:
                            pole_pos = coord_systems.GeoPosition(30, 150)

                        lonlat_cs = coord_systems.LatLonCS("datum?", "prime_meridian?", pole_pos, "reference_longitude?")
                        cube.add_dim_coord(coords.DimCoord(numpy.array([-180, -90, 0, 90, 180], dtype=ll_dtype), 
                                           'longitude', units='degrees', coord_system=lonlat_cs), 1)
                        cube.add_dim_coord(coords.DimCoord(numpy.array([-90, -45, 0, 45, 90], dtype=ll_dtype), 
                                           'latitude', units='degrees', coord_system=lonlat_cs), 0)
                        
                        # pressure (scalar aux coord)
                        cube.add_aux_coord(coords.AuxCoord(numpy.array([1000], dtype=numpy.int32), 
                                                           long_name='pressure', units='Pa'))

                        # phenom
                        cube.rename("temperature")
                        cube.units = "K"

                        # source
                        cube.add_aux_coord(coords.AuxCoord(points=["itbb"], long_name='source', units="no_unit"))

                        # forecast dates
                        if forecast_or_time_mean == "forecast":
                            unit = iris.unit.Unit('hours since epoch', calendar=iris.unit.CALENDAR_GREGORIAN)
                            dt = datetime.datetime(2010, 12, 31, 12, 0)
                            cube.add_aux_coord(coords.AuxCoord(numpy.array([6], dtype=numpy.int32), 
                                                               standard_name='forecast_period', units='hours'))
                            cube.add_aux_coord(coords.AuxCoord(numpy.array([unit.date2num(dt)], dtype=numpy.float64), 
                                                               standard_name='time', units=unit))

                        # time mean dates
                        if forecast_or_time_mean == "time_mean":
                            unit = iris.unit.Unit('hours since epoch', calendar=iris.unit.CALENDAR_GREGORIAN)
                            dt1 = datetime.datetime(2010, 12, 31, 6, 0)
                            dt2 = datetime.datetime(2010, 12, 31, 12, 0)
                            dt_mid = datetime.datetime(2010, 12, 31, 9, 0)
                            cube.add_aux_coord(coords.AuxCoord(numpy.array([6], dtype=numpy.int32), 
                                                               standard_name='forecast_period', units='hours'))
                            cube.add_aux_coord(coords.AuxCoord(numpy.array(unit.date2num(dt_mid), dtype=numpy.float64),
                                                               standard_name='time', units=unit, 
                                                               bounds=numpy.array([unit.date2num(dt1), unit.date2num(dt2)], 
                                                                                  dtype=numpy.float64)))
                            cube.add_cell_method(coords.CellMethod('mean', cube.coord('forecast_period')))

                        cubes.append(cube)
        
        # Now we've made all sorts of cubes, check the XML...
        self.assertCML(cubes, ('xml', 'handmade.cml'))
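
The LatLonCS/GeoPosition classes used above belong to a pre-1.0 Iris API. For reference, a rough modern-Iris sketch of the rotated-pole longitude coordinate from the "rotated" branch; the equivalence is an assumption about intent, not a drop-in replacement for the old test.

import numpy as np
import iris.coords
import iris.coord_systems

# Rotated pole at latitude 30, longitude 150, as in the rotated case above.
rotated_cs = iris.coord_systems.RotatedGeogCS(30, 150)
lon_coord = iris.coords.DimCoord(
    np.array([-180, -90, 0, 90, 180], dtype=np.float32),
    standard_name='longitude', units='degrees', coord_system=rotated_cs)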
Exemple #19
0
def _generate_cubes(header,
                    column_headings,
                    coords,
                    data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in column_headings.iteritems()}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = '{} {}'.format(field_headings['Species'],
                              field_headings['Quantity'])
        name = name.upper().replace(' ', '_')
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings['Unit'])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        z_coord = _cf_height_from_name(field_headings['Z'])
        cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = iris.unit.Unit('hours since epoch',
                                   calendar=iris.unit.CALENDAR_GREGORIAN)

        # Build time, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == 'latitude' or coord.name == 'longitude':
                coord_units = 'degrees'
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == 'time':
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                icoord = DimCoord(points=pts,
                                  standard_name=coord.name,
                                  units=coord_units,
                                  coord_system=coord_sys)
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            'X',
            'Y',
            'Z',
            'Time',
            'Unit',
            'Av or Int period',
            'X grid origin',
            'Y grid origin',
            'X grid size',
            'Y grid size',
            'X grid resolution',
            'Y grid resolution',
        ]

        # Add the Main Headings as attributes.
        for key, value in header.iteritems():
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in field_headings.iteritems():
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
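
The time-bounds construction above (subtracting the 'Av or Int period' value from each time and stacking the two columns) is easier to follow with concrete numbers. A small illustration, assuming the averaging period has already been parsed into a datetime.timedelta as in the NAME loaders; cf_units stands in for the older iris.unit module used in this example.

import datetime

import cf_units
import numpy as np

time_unit = cf_units.Unit('hours since epoch', calendar='gregorian')
times = np.array([datetime.datetime(2013, 1, 1, 12),
                  datetime.datetime(2013, 1, 1, 15)])
period = datetime.timedelta(hours=3)

# One [lower, upper] pair per time point: [t - period, t].
bounds = time_unit.date2num(np.vstack((times - period, times)).T)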
Exemple #20
0
def curl(i_cube, j_cube, k_cube=None, ignore=None):
    r'''
    Calculate the 3d curl of the given vector of cubes.

    Args:

    * i_cube
        The i cube of the vector to operate on
    * j_cube
        The j cube of the vector to operate on

    Kwargs:

    * k_cube
        The k cube of the vector to operate on

    Return (i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube)

    The calculation of curl is dependent on the type of :func:`iris.coord_systems.CoordSystem` in the cube:

        Cartesian curl

            The Cartesian curl is defined as:

            .. math::

                \nabla\times \vec u = (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z}) \vec a_i - (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j + (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k

        Spherical curl

            When spherical calculus is used, i_cube is the phi vector component (e.g. eastward), j_cube is the theta component
            (e.g. northward) and k_cube is the radial component.

            The spherical curl is defined as:

            .. math::

                \nabla\times \vec A = \frac{1}{r cos \theta}(\frac{\delta}{\delta \theta}(\vec A_\phi cos \theta) - \frac{\delta \vec A_\theta}{\delta \phi}) \vec r + \frac{1}{r}(\frac{1}{cos \theta} \frac{\delta \vec A_r}{\delta \phi} - \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta + \frac{1}{r}(\frac{\delta}{\delta r}(r \vec A_\theta) - \frac{\delta \vec A_r}{\delta \theta}) \vec \phi

            where phi is longitude, theta is latitude.

    '''
    if ignore is not None:
        ignore = None
        warnings.warn(
            'The ignore keyword to iris.analysis.calculus.curl is deprecated, ignoring is now done automatically.'
        )

    # Get the vector quantity names (i.e. ['easterly', 'northerly', 'vertical'])
    vector_quantity_names, phenomenon_name = spatial_vectors_with_phenom_name(
        i_cube, j_cube, k_cube)

    cubes = filter(None, [i_cube, j_cube, k_cube])

    # get the names of all coords binned into useful comparison groups
    coord_comparison = iris.analysis.coord_comparison(*cubes)

    bad_coords = coord_comparison['ungroupable_and_dimensioned']
    if bad_coords:
        raise ValueError(
            "Coordinates found in one cube that describe a data dimension which weren't in the other "
            "cube (%s), try removing this coordinate." %
            ', '.join([group.name() for group in bad_coords]))

    bad_coords = coord_comparison['resamplable']
    if bad_coords:
        raise ValueError(
            'Some coordinates are different (%s), consider resampling.' %
            ', '.join([group.name() for group in bad_coords]))

    ignore_string = ''
    if coord_comparison['ignorable']:
        ignore_string = ' (ignoring %s)' % ', '.join(
            [group.name() for group in coord_comparison['ignorable']])

    # Get the dim_coord, or None if none exist, for the xyz dimensions
    x_coord = i_cube.coord(axis='X')
    y_coord = i_cube.coord(axis='Y')
    z_coord = i_cube.coord(axis='Z')

    y_dim = i_cube.coord_dims(y_coord)[0]

    horiz_cs = i_cube.coord_system('CoordSystem')

    # Planar (non spherical) coords?
    ellipsoidal = isinstance(
        horiz_cs,
        (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS))
    if not ellipsoidal:

        # TODO Implement some mechanism for conforming to a common grid
        dj_dx = _curl_differentiate(j_cube, x_coord)
        prototype_diff = dj_dx

        # i curl component (dk_dy - dj_dz)
        dk_dy = _curl_differentiate(k_cube, y_coord)
        dk_dy = _curl_regrid(dk_dy, prototype_diff)
        dj_dz = _curl_differentiate(j_cube, z_coord)
        dj_dz = _curl_regrid(dj_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding does not support).
        if dj_dz is not None and dj_dz.data.shape != prototype_diff.data.shape:
            dj_dz = _curl_change_z(dj_dz, z_coord, prototype_diff)

        i_cmpt = _curl_subtract(dk_dy, dj_dz)
        dj_dz = dk_dy = None

        # j curl component (di_dz - dk_dx)
        di_dz = _curl_differentiate(i_cube, z_coord)
        di_dz = _curl_regrid(di_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding does not support).
        if di_dz is not None and di_dz.data.shape != prototype_diff.data.shape:
            di_dz = _curl_change_z(di_dz, z_coord, prototype_diff)

        dk_dx = _curl_differentiate(k_cube, x_coord)
        dk_dx = _curl_regrid(dk_dx, prototype_diff)
        j_cmpt = _curl_subtract(di_dz, dk_dx)
        di_dz = dk_dx = None

        # k curl component ( dj_dx - di_dy)
        di_dy = _curl_differentiate(i_cube, y_coord)
        di_dy = _curl_regrid(di_dy, prototype_diff)
        # Since prototype_diff == dj_dx we don't need to recalculate dj_dx
        #        dj_dx = _curl_differentiate(j_cube, x_coord)
        #        dj_dx = _curl_regrid(dj_dx, prototype_diff)
        k_cmpt = _curl_subtract(dj_dx, di_dy)
        di_dy = dj_dx = None

        result = [i_cmpt, j_cmpt, k_cmpt]

    # Spherical coords (GeogCS or RotatedGeogCS).
    else:
        # A_\phi = i ; A_\theta = j ; A_\r = k
        # theta = lat ; phi = long ;
        # r_cmpt = 1/ ( r * cos(lat) ) * ( d/dtheta ( i_cube * cos( lat ) ) - d_j_cube_dphi )
        # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
        # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube) )
        if y_coord.name() != 'latitude' or x_coord.name() != 'longitude':
            raise ValueError(
                'Expecting latitude as the y coord and longitude as the x coord for spherical curl.'
            )

        # Get the radius of the earth - and check for sphericity
        ellipsoid = horiz_cs
        if isinstance(horiz_cs, iris.coord_systems.RotatedGeogCS):
            ellipsoid = horiz_cs.ellipsoid
        if ellipsoid:
            # TODO: Add a test for this
            r = ellipsoid.semi_major_axis
            r_unit = iris.unit.Unit("m")
            spherical = (ellipsoid.inverse_flattening == 0.0)
        else:
            r = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS
            r_unit = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS_UNIT
            spherical = True

        if not spherical:
            raise ValueError(
                "Cannot take the curl over a non-spherical ellipsoid.")

        lon_coord = x_coord.copy()
        lat_coord = y_coord.copy()
        lon_coord.convert_units('radians')
        lat_coord.convert_units('radians')
        lat_cos_coord = _coord_cos(lat_coord)

        # TODO Implement some mechanism for conforming to a common grid
        temp = iris.analysis.maths.multiply(i_cube, lat_cos_coord, y_dim)
        dicos_dtheta = _curl_differentiate(temp, lat_coord)
        prototype_diff = dicos_dtheta

        # r curl component:  1/ ( r * cos(lat) ) * ( dicos_dtheta - d_j_cube_dphi )
        # Since prototype_diff == dicos_dtheta we don't need to recalculate dicos_dtheta
        d_j_cube_dphi = _curl_differentiate(j_cube, lon_coord)
        d_j_cube_dphi = _curl_regrid(d_j_cube_dphi, prototype_diff)
        new_lat_coord = d_j_cube_dphi.coord(name='latitude')
        new_lat_cos_coord = _coord_cos(new_lat_coord)
        lat_dim = d_j_cube_dphi.coord_dims(new_lat_coord)[0]
        r_cmpt = iris.analysis.maths.divide(_curl_subtract(
            dicos_dtheta, d_j_cube_dphi),
                                            r * new_lat_cos_coord,
                                            dim=lat_dim)
        r_cmpt.units = r_cmpt.units / r_unit
        d_j_cube_dphi = dicos_dtheta = None

        # phi curl component: 1/r * ( drj_dr - d_k_cube_dtheta)
        drj_dr = _curl_differentiate(r * j_cube, z_coord)
        if drj_dr is not None:
            drj_dr.units = drj_dr.units * r_unit
        drj_dr = _curl_regrid(drj_dr, prototype_diff)
        d_k_cube_dtheta = _curl_differentiate(k_cube, lat_coord)
        d_k_cube_dtheta = _curl_regrid(d_k_cube_dtheta, prototype_diff)
        if drj_dr is None and d_k_cube_dtheta is None:
            phi_cmpt = None
        else:
            phi_cmpt = 1 / r * _curl_subtract(drj_dr, d_k_cube_dtheta)
            phi_cmpt.units = phi_cmpt.units / r_unit

        drj_dr = d_k_cube_dtheta = None

        # theta curl component: 1/r * ( 1/cos(lat) * d_k_cube_dphi - dri_dr )
        d_k_cube_dphi = _curl_differentiate(k_cube, lon_coord)
        d_k_cube_dphi = _curl_regrid(d_k_cube_dphi, prototype_diff)
        if d_k_cube_dphi is not None:
            d_k_cube_dphi = iris.analysis.maths.divide(d_k_cube_dphi,
                                                       lat_cos_coord)
        dri_dr = _curl_differentiate(r * i_cube, z_coord)
        if dri_dr is not None:
            dri_dr.units = dri_dr.units * r_unit
        dri_dr = _curl_regrid(dri_dr, prototype_diff)
        if d_k_cube_dphi is None and dri_dr is None:
            theta_cmpt = None
        else:
            theta_cmpt = 1 / r * _curl_subtract(d_k_cube_dphi, dri_dr)
            theta_cmpt.units = theta_cmpt.units / r_unit
        d_k_cube_dphi = dri_dr = None

        result = [phi_cmpt, theta_cmpt, r_cmpt]

    for direction, cube in zip(vector_quantity_names, result):
        if cube is not None:
            cube.rename('%s curl of %s' % (direction, phenomenon_name))

    return result
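
A minimal usage sketch for the public entry point, iris.analysis.calculus.curl; the file name and phenomenon constraints are assumptions for illustration only.

import iris
import iris.analysis.calculus

# Three wind-component cubes on a common grid (hypothetical source file).
u_cube = iris.load_cube('winds.nc', 'x_wind')
v_cube = iris.load_cube('winds.nc', 'y_wind')
w_cube = iris.load_cube('winds.nc', 'upward_air_velocity')

i_curl, j_curl, k_curl = iris.analysis.calculus.curl(u_cube, v_cube, w_cube)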
Exemple #21
0
def curl(i_cube, j_cube, k_cube=None):
    r"""
    Calculate the 2-dimensional or 3-dimensional spherical or cartesian
    curl of the given vector of cubes.

    As well as the standard x and y coordinates, this function requires each
    cube to possess a vertical or z-like coordinate (representing some form
    of height or pressure).  This can be a scalar or dimension coordinate.

    Args:

    * i_cube
        The i cube of the vector to operate on
    * j_cube
        The j cube of the vector to operate on

    Kwargs:

    * k_cube
        The k cube of the vector to operate on

    Return (i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube)

    If the k-cube is not passed in then the 2-dimensional curl will
    be calculated, yielding the result: [None, None, k_cmpt_curl_cube].
    If the k-cube is passed in, the 3-dimensional curl will
    be calculated, returning 3 component cubes.

    All cubes passed in must have the same data units, and those units
    must be spatially-derived (e.g. 'm/s' or 'km/h').

    The calculation of curl is dependent on the type of
    :func:`~iris.coord_systems.CoordSystem` in the cube.
    If the :func:`~iris.coord_systems.CoordSystem` is either
    GeogCS or RotatedGeogCS, the spherical curl will be calculated; otherwise
    the cartesian curl will be calculated:

        Cartesian curl

            When cartesian calculus is used, i_cube is the u component,
            j_cube is the v component and k_cube is the w component.

            The Cartesian curl is defined as:

            .. math::

                \nabla\times \vec u =
                (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z})\vec a_i
                -
                (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j
                +
                (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k

        Spherical curl

            When spherical calculus is used, i_cube is the :math:`\phi` vector
            component (e.g. eastward), j_cube is the :math:`\theta` component
            (e.g. northward) and k_cube is the radial component.

            The spherical curl is defined as:

            .. math::

                \nabla\times \vec A = \frac{1}{r cos \theta}
                (\frac{\delta}{\delta \theta}
                (\vec A_\phi cos \theta) -
                \frac{\delta \vec A_\theta}{\delta \phi}) \vec r +
                \frac{1}{r}(\frac{1}{cos \theta}
                \frac{\delta \vec A_r}{\delta \phi} -
                \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta +
                \frac{1}{r}
                (\frac{\delta}{\delta r}(r \vec A_\theta) -
                \frac{\delta \vec A_r}{\delta \theta}) \vec \phi

            where phi is longitude, theta is latitude.

    """
    # Get the vector quantity names.
    # (i.e. ['easterly', 'northerly', 'vertical'])
    vector_quantity_names, phenomenon_name = \
        spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube)

    cubes = filter(None, [i_cube, j_cube, k_cube])

    # get the names of all coords binned into useful comparison groups
    coord_comparison = iris.analysis.coord_comparison(*cubes)

    bad_coords = coord_comparison['ungroupable_and_dimensioned']
    if bad_coords:
        raise ValueError("Coordinates found in one cube that describe "
                         "a data dimension which weren't in the other "
                         "cube ({}), try removing this coordinate.".format(
                             ', '.join(group.name() for group in bad_coords)))

    bad_coords = coord_comparison['resamplable']
    if bad_coords:
        raise ValueError('Some coordinates are different ({}), consider '
                         'resampling.'.format(', '.join(
                             group.name() for group in bad_coords)))

    ignore_string = ''
    if coord_comparison['ignorable']:
        ignore_string = ' (ignoring {})'.format(', '.join(
            group.name() for group in coord_comparison['ignorable']))

    # Get the dim_coord, or None if none exist, for the xyz dimensions
    x_coord = i_cube.coord(axis='X')
    y_coord = i_cube.coord(axis='Y')
    z_coord = i_cube.coord(axis='Z')

    y_dim = i_cube.coord_dims(y_coord)[0]

    horiz_cs = i_cube.coord_system('CoordSystem')

    # Non-spherical coords?
    spherical_coords = isinstance(
        horiz_cs,
        (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS))
    if not spherical_coords:

        # TODO Implement some mechanism for conforming to a common grid
        dj_dx = _curl_differentiate(j_cube, x_coord)
        prototype_diff = dj_dx

        # i curl component (dk_dy - dj_dz)
        dk_dy = _curl_differentiate(k_cube, y_coord)
        dk_dy = _curl_regrid(dk_dy, prototype_diff)
        dj_dz = _curl_differentiate(j_cube, z_coord)
        dj_dz = _curl_regrid(dj_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding
        # does not support).
        if dj_dz is not None and dj_dz.data.shape != prototype_diff.data.shape:
            dj_dz = _curl_change_z(dj_dz, z_coord, prototype_diff)

        i_cmpt = _curl_subtract(dk_dy, dj_dz)
        dj_dz = dk_dy = None

        # j curl component (di_dz - dk_dx)
        di_dz = _curl_differentiate(i_cube, z_coord)
        di_dz = _curl_regrid(di_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding
        # does not support).
        if di_dz is not None and di_dz.data.shape != prototype_diff.data.shape:
            di_dz = _curl_change_z(di_dz, z_coord, prototype_diff)

        dk_dx = _curl_differentiate(k_cube, x_coord)
        dk_dx = _curl_regrid(dk_dx, prototype_diff)
        j_cmpt = _curl_subtract(di_dz, dk_dx)
        di_dz = dk_dx = None

        # k curl component ( dj_dx - di_dy)
        di_dy = _curl_differentiate(i_cube, y_coord)
        di_dy = _curl_regrid(di_dy, prototype_diff)
        # Since prototype_diff == dj_dx we don't need to recalculate dj_dx
        #        dj_dx = _curl_differentiate(j_cube, x_coord)
        #        dj_dx = _curl_regrid(dj_dx, prototype_diff)
        k_cmpt = _curl_subtract(dj_dx, di_dy)
        di_dy = dj_dx = None

        result = [i_cmpt, j_cmpt, k_cmpt]

    # Spherical coords (GeogCS or RotatedGeogCS).
    else:
        # A_\phi = i ; A_\theta = j ; A_\r = k
        # theta = lat ; phi = long ;
        # r_cmpt = 1 / (r * cos(lat)) *
        #    (d_j_cube_dphi - d/dtheta (i_cube * cos(lat)))
        # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
        # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube) )
        if y_coord.name() not in ['latitude', 'grid_latitude'] \
                or x_coord.name() not in ['longitude', 'grid_longitude']:
            raise ValueError('Expecting latitude as the y coord and '
                             'longitude as the x coord for spherical curl.')

        # Get the radius of the earth - and check for sphericity
        ellipsoid = horiz_cs
        if isinstance(horiz_cs, iris.coord_systems.RotatedGeogCS):
            ellipsoid = horiz_cs.ellipsoid
        if ellipsoid:
            # TODO: Add a test for this
            r = ellipsoid.semi_major_axis
            r_unit = cf_units.Unit("m")
            spherical = (ellipsoid.inverse_flattening == 0.0)
        else:
            r = DEFAULT_SPHERICAL_EARTH_RADIUS
            r_unit = DEFAULT_SPHERICAL_EARTH_RADIUS_UNIT
            spherical = True

        if not spherical:
            raise ValueError('Cannot take the curl over a non-spherical '
                             'ellipsoid.')

        lon_coord = x_coord.copy()
        lat_coord = y_coord.copy()
        lon_coord.convert_units('radians')
        lat_coord.convert_units('radians')
        lat_cos_coord = _coord_cos(lat_coord)

        # TODO Implement some mechanism for conforming to a common grid
        temp = iris.analysis.maths.multiply(i_cube, lat_cos_coord, y_dim)
        dicos_dtheta = _curl_differentiate(temp, lat_coord)
        prototype_diff = dicos_dtheta

        # r curl component: 1 / (r * cos(lat)) * (d_j_cube_dphi - dicos_dtheta)
        # Since prototype_diff == dicos_dtheta we don't need to
        # recalculate dicos_dtheta.
        d_j_cube_dphi = _curl_differentiate(j_cube, lon_coord)
        d_j_cube_dphi = _curl_regrid(d_j_cube_dphi, prototype_diff)
        new_lat_coord = d_j_cube_dphi.coord(axis='Y')
        new_lat_cos_coord = _coord_cos(new_lat_coord)
        lat_dim = d_j_cube_dphi.coord_dims(new_lat_coord)[0]
        r_cmpt = iris.analysis.maths.divide(_curl_subtract(
            d_j_cube_dphi, dicos_dtheta),
                                            r * new_lat_cos_coord,
                                            dim=lat_dim)
        r_cmpt.units = r_cmpt.units / r_unit
        d_j_cube_dphi = dicos_dtheta = None

        # phi curl component: 1/r * ( drj_dr - d_k_cube_dtheta)
        drj_dr = _curl_differentiate(r * j_cube, z_coord)
        if drj_dr is not None:
            drj_dr.units = drj_dr.units * r_unit
        drj_dr = _curl_regrid(drj_dr, prototype_diff)
        d_k_cube_dtheta = _curl_differentiate(k_cube, lat_coord)
        d_k_cube_dtheta = _curl_regrid(d_k_cube_dtheta, prototype_diff)
        if drj_dr is None and d_k_cube_dtheta is None:
            phi_cmpt = None
        else:
            phi_cmpt = 1 / r * _curl_subtract(drj_dr, d_k_cube_dtheta)
            phi_cmpt.units = phi_cmpt.units / r_unit

        drj_dr = d_k_cube_dtheta = None

        # theta curl component: 1/r * ( 1/cos(lat) * d_k_cube_dphi - dri_dr )
        d_k_cube_dphi = _curl_differentiate(k_cube, lon_coord)
        d_k_cube_dphi = _curl_regrid(d_k_cube_dphi, prototype_diff)
        if d_k_cube_dphi is not None:
            d_k_cube_dphi = iris.analysis.maths.divide(d_k_cube_dphi,
                                                       lat_cos_coord)
        dri_dr = _curl_differentiate(r * i_cube, z_coord)
        if dri_dr is not None:
            dri_dr.units = dri_dr.units * r_unit
        dri_dr = _curl_regrid(dri_dr, prototype_diff)
        if d_k_cube_dphi is None and dri_dr is None:
            theta_cmpt = None
        else:
            theta_cmpt = 1 / r * _curl_subtract(d_k_cube_dphi, dri_dr)
            theta_cmpt.units = theta_cmpt.units / r_unit
        d_k_cube_dphi = dri_dr = None

        result = [phi_cmpt, theta_cmpt, r_cmpt]

    for direction, cube in zip(vector_quantity_names, result):
        if cube is not None:
            cube.rename('%s curl of %s' % (direction, phenomenon_name))

    return result
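
For the 2-D case described in the docstring, omit k_cube; provided the inputs carry only a scalar vertical coordinate, the first two components come back as None. File and phenomenon names below are hypothetical.

import iris
import iris.analysis.calculus

u_cube = iris.load_cube('winds.nc', 'x_wind')
v_cube = iris.load_cube('winds.nc', 'y_wind')

i_curl, j_curl, k_curl = iris.analysis.calculus.curl(u_cube, v_cube)
# i_curl and j_curl are None here; k_curl holds the vertical component
# of the curl.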
Exemple #22
0
def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in six.iteritems(column_headings)}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = "{} {}".format(field_headings["Species"], field_headings["Quantity"])
        name = name.upper().replace(" ", "_")
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings["Units"])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if "Z" in field_headings:
            upper_bound = field_headings.get("... to [Z]")
            lower_bound = field_headings.get("... from [Z]")
            z_coord = _cf_height_from_name(field_headings["Z"], upper_bound=upper_bound, lower_bound=lower_bound)
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit("hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == "latitude" or coord.name == "longitude":
                coord_units = "degrees"
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == "projection_x_coordinate" or coord.name == "projection_y_coordinate":
                coord_units = "m"
                coord_sys = iris.coord_systems.OSGB()
            if coord.name == "height":
                coord_units = "m"
                long_name = "height above ground level"
                pts = coord.values
            if coord.name == "altitude":
                coord_units = "m"
                long_name = "altitude above sea level"
                pts = coord.values
            if coord.name == "air_pressure":
                coord_units = "Pa"
                pts = coord.values
            if coord.name == "flight_level":
                pts = coord.values
                long_name = "flight_level"
                coord_units = _parse_units("FL")
            if coord.name == "time":
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                if coord.name == "longitude":
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == "flight_level":
                    icoord = DimCoord(points=pts, units=coord_units, long_name=long_name)
                else:
                    icoord = DimCoord(
                        points=pts,
                        standard_name=coord.name,
                        units=coord_units,
                        coord_system=coord_sys,
                        circular=circular,
                    )
                if coord.name == "height" or coord.name == "altitude":
                    icoord.long_name = long_name
                if coord.name == "time" and "Av or Int period" in field_headings:
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                icoord = AuxCoord(points=pts[i], standard_name=coord.name, coord_system=coord_sys, units=coord_units)
                if coord.name == "time" and "Av or Int period" in field_headings:
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            "X",
            "Y",
            "Z",
            "Time",
            "T",
            "Units",
            "Av or Int period",
            "... from [Z]",
            "... to [Z]",
            "X grid origin",
            "Y grid origin",
            "X grid size",
            "Y grid size",
            "X grid resolution",
            "Y grid resolution",
            "Number of field cols",
            "Number of preliminary cols",
            "Number of fields",
            "Number of series",
            "Output format",
        ]

        # Add the Main Headings as attributes.
        for key, value in six.iteritems(header):
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in six.iteritems(field_headings):
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
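
The comment above notes that NAME unit strings are not always valid udunits strings. A rough sketch of the kind of normalisation _parse_units performs; the mapping below is illustrative only and is not the loader's actual table.

import cf_units

# Illustrative corrections only.
_UNIT_FIXES = {'gs/m3': 'g s/m3', 'FL': 'unknown', 'Risks': 'unknown'}

def parse_units_sketch(name):
    # Substitute a known-good udunits string where one exists, otherwise
    # fall back to 'unknown' rather than failing the load.
    name = _UNIT_FIXES.get(name, name)
    try:
        return cf_units.Unit(name)
    except ValueError:
        return cf_units.Unit('unknown')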