def lon_lat_coords(self, lons, lats, cs=None):
    if cs is None:
        cs = self.geog_cs
    return (coords.AuxCoord(lons, 'longitude', units='degrees',
                            coord_system=cs),
            coords.AuxCoord(lats, 'latitude', units='degrees',
                            coord_system=cs))
Example #2
def upscale_callback(cube, field, filename):

    # Split the file path into its components; the index positions used
    # below assume a fixed directory layout.
    str_split = re.split("/", filename)

    # Create new coordinates
    job_coord = coords.AuxCoord(str_split[9],
                                long_name='JobID',
                                units='no_unit')
    cube.add_aux_coord(job_coord)
    exp_coord = coords.AuxCoord(str_split[5],
                                long_name='Experiment',
                                units='no_unit')
    cube.add_aux_coord(exp_coord)
    stash_coord = coords.AuxCoord(str_split[8],
                                  long_name='Stash',
                                  units='no_unit')
    cube.add_aux_coord(stash_coord)
    res_coord = coords.AuxCoord(str_split[6],
                                long_name='Resolution',
                                units='no_unit')
    cube.add_aux_coord(res_coord)

    ### Add season, year coordinate categorisations
    if str_split[7] == 'monthly':
        seasons = ['djf', 'mam', 'jja', 'son']
        iris.coord_categorisation.add_year(cube, 'time', name='year')
        iris.coord_categorisation.add_month_number(cube, 'time', name='month')
        iris.coord_categorisation.add_season(cube,
                                             'time',
                                             name='clim_season',
                                             seasons=seasons)
        iris.coord_categorisation.add_season_year(cube,
                                                  'time',
                                                  name='season_year',
                                                  seasons=seasons)
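A hedged usage sketch for the callback above (the glob pattern and directory layout are illustrative assumptions, chosen only so that the str_split index positions resolve; iris.load accepts the callback via its callback keyword):

import glob
import iris

# Hypothetical paths: index 5 = experiment, 6 = resolution, 7 = frequency,
# 8 = stash code, 9 = job ID, matching the indices used in upscale_callback.
filenames = glob.glob(
    '/data/users/fred/experiments/exp-*/N512/monthly/m01s00i024/u-ab123/*.nc')
cubes = iris.load(filenames, callback=upscale_callback)
print(cubes)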
Example #3
def load_Nexrad(filenames,variable):
    import iris
    from iris.cube import CubeList
    from iris import coords
    from datetime import datetime,timedelta
    from os.path import basename
    cube_list=[]
    for filename in filenames:
        cube = iris.load_cube(filename, variable)  # e.g. 'equivalent_reflectivity_factor'
        #time=iris.load_cube(filename,'time')
        timestring=basename(filename)[12:27]
        time_point = datetime.strptime(timestring, "%Y%m%d_%H%M%S")
        time_days=(time_point - datetime(1970,1,1)).total_seconds() / timedelta(1).total_seconds()
        x=iris.load_cube(filename,'X-coordinate in Cartesian system')
        y=iris.load_cube(filename,'Y-coordinate in Cartesian system')
        z=iris.load_cube(filename,'Z-coordinate in Cartesian system')
        lat=iris.load_cube(filename,'Latitude grid')
        lon=iris.load_cube(filename,'Longitude grid')
        cube.remove_coord('time')
        cube.add_dim_coord(coords.DimCoord(time_days, long_name='time', var_name='time',
                                           units='days since 1970-01-01'), 0)
        cube.add_dim_coord(coords.DimCoord(x.data, long_name='x', var_name='x', units='m'), 2)
        cube.add_dim_coord(coords.DimCoord(y.data, long_name='y', var_name='y', units='m'), 3)
        cube.add_dim_coord(coords.DimCoord(z.data, long_name='z', var_name='z', units='m'), 1)
        cube.add_aux_coord(coords.AuxCoord(lat.data, standard_name='latitude', long_name='latitude',
                                           var_name='latitude', units='degrees'), (2, 3))
        cube.add_aux_coord(coords.AuxCoord(lon.data, standard_name='longitude', long_name='longitude',
                                           var_name='longitude', units='degrees'), (2, 3))
        cube_list.append(cube)
    for member in cube_list:
        member.attributes={}
    variable_cubes=CubeList(cube_list)
    variable_cube=variable_cubes.concatenate_cube()
    
    return variable_cube
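A hedged usage sketch (the glob pattern is illustrative; the variable name is the one suggested in the comment inside the function):

import glob

# Basenames must carry a YYYYMMDD_HHMMSS timestamp at positions 12:27,
# as assumed by the slicing inside load_Nexrad.
filenames = sorted(glob.glob('/data/nexrad/grids/*.nc'))
reflectivity = load_Nexrad(filenames, 'equivalent_reflectivity_factor')
print(reflectivity)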
Example #4
    def _test_rotated(
        self,
        grid_north_pole_latitude=90,
        grid_north_pole_longitude=0,
        north_pole_grid_longitude=0,
    ):
        cs = ics.RotatedGeogCS(
            grid_north_pole_latitude,
            grid_north_pole_longitude,
            north_pole_grid_longitude,
        )
        glon = coords.AuxCoord([359, 1],
                               "grid_longitude",
                               units="degrees",
                               coord_system=cs)
        glat = coords.AuxCoord([0, 0],
                               "grid_latitude",
                               units="degrees",
                               coord_system=cs)
        expected_path = Path([[-1, 0], [1, 0]], [Path.MOVETO, Path.LINETO])

        plt.figure()
        lines = iplt.plot(glon, glat)
        # Matplotlib won't immediately set up the correct transform to allow us
        # to compare paths. Calling set_global(), which calls set_xlim() and
        # set_ylim(), will trigger Matplotlib to set up the transform.
        ax = plt.gca()
        ax.set_global()

        crs = cs.as_cartopy_crs()
        self.check_paths(expected_path, crs, lines, ax)
Example #5
File: iterate.py Project: wilbertcs/iris
    def setup(self):
        local_cube = general_cube.copy()
        coord_a = coords.AuxCoord(points=data_1d, long_name="a")
        coord_b = coords.AuxCoord(points=data_1d, long_name="b")
        self.coord_names = (coord.long_name for coord in (coord_a, coord_b))

        local_cube.add_aux_coord(coord_a, 0)
        local_cube.add_aux_coord(coord_b, 1)
        self.cube = local_cube
Example #6
def _generate_ocean_cube():
    """
    Returns a CubeList of realistic 3d ocean cubes covering an extended time range.
    """
    cube_list = iris.cube.CubeList()
    lower_bound = 0
    upper_bound = 70
    period = 70
    for i in range(0, 100):
        data = np.arange(70 * 9 * 11).reshape((70, 9, 11))
        lat_pts = np.arange(9 * 11).reshape(9, 11)
        lon_pts = np.arange(9 * 11).reshape(9, 11)

        time_pts = np.linspace(lower_bound, upper_bound - 1, 70)
        cell_index_first = np.linspace(0, 8, 9)
        cell_index_second = np.linspace(0, 10, 11)

        lat = icoords.AuxCoord(
            lat_pts,
            standard_name="grid_latitude",
            units="degrees",
        )
        lon = icoords.AuxCoord(
            lon_pts,
            standard_name="grid_longitude",
            units="degrees",
        )
        time = icoords.DimCoord(
            time_pts, standard_name="time",
            units="days since 1970-01-01 00:00:00"
        )
        cell_index_first = icoords.DimCoord(
            cell_index_first, units=cf_units.Unit('1'),
            long_name='cell index along first dimension', var_name='i'
        )

        cell_index_second = icoords.DimCoord(
            cell_index_second, units=cf_units.Unit('1'),
            long_name='cell index along second dimension', var_name='j'
        )

        cube = iris.cube.Cube(
            data,
            standard_name='surface_downward_mass_flux_of_carbon'
                          '_dioxide_expressed_as_carbon',
            units=cf_units.Unit('kg m-2 s-1'),
            dim_coords_and_dims=[(time, 0),
                                 (cell_index_first, 1),
                                 (cell_index_second, 2)],
            aux_coords_and_dims=[(lat, (1, 2)), (lon, (1, 2))],
            attributes={"source": "Iris test case"},
        )
        lower_bound = lower_bound + 70
        upper_bound = upper_bound + 70
        period = period + 70
        cube_list.append(cube)
    return cube_list
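A short usage sketch (hedged): the 100 chunks share identical metadata and contiguous time points, so they can be joined with concatenate_cube, just as in Example #3 above.

chunks = _generate_ocean_cube()
# 100 chunks of 70 daily points each -> a single (7000, 9, 11) cube.
ocean_cube = chunks.concatenate_cube()
print(ocean_cube.shape)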
Example #7
def NAME_to_cube(filenames, callback):
    """Returns a generator of cubes given a list of filenames and a callback."""

    for filename in filenames:
        header, column_headings, data_arrays = load_NAME_III(filename)

        for i, data_array in enumerate(data_arrays):
            # turn the dictionary of column headers with a list of header information for each field into a dictionary of
            # headers for just this field. Ignore the first 4 columns of grid position (data was located with the data array).
            field_headings = dict([(k, v[i + 4]) for k, v in column_headings.iteritems()])

            # make a cube
            cube = iris.cube.Cube(data_array)

            # define the name and unit
            name = ('%s %s' % (field_headings['species'], field_headings['quantity'])).upper().replace(' ', '_')
            cube.rename(name)
            # Some units are badly encoded in the file, fix this by putting a space in between. (if gs is not found, then the
            # string will be returned unchanged)
            cube.units = field_headings['unit'].replace('gs', 'g s')

            # define and add the singular coordinates of the field (flight level, time etc.)
            cube.add_aux_coord(icoords.AuxCoord(field_headings['z_level'], long_name='flight_level', units='1'))

            # define the time unit and use it to serialise the datetime for the time coordinate
            time_unit = iris.unit.Unit('hours since epoch', calendar=iris.unit.CALENDAR_GREGORIAN)
            time_coord = icoords.AuxCoord(time_unit.date2num(field_headings['time']), standard_name='time', units=time_unit)
            cube.add_aux_coord(time_coord)

            # build a coordinate system which can be referenced by latitude and longitude coordinates
            lat_lon_coord_system = icoord_systems.GeogCS(6371229)

            # build regular latitude and longitude coordinates which have bounds
            start = header['X grid origin'] + header['X grid resolution']
            step = header['X grid resolution']
            count = header['X grid size']
            pts = start + np.arange(count, dtype=np.float32) * step
            lon_coord = icoords.DimCoord(pts, standard_name='longitude', units='degrees', coord_system=lat_lon_coord_system)
            lon_coord.guess_bounds()

            start = header['Y grid origin'] + header['Y grid resolution']
            step = header['Y grid resolution']
            count = header['Y grid size']
            pts = start + np.arange(count, dtype=np.float32) * step
            lat_coord = icoords.DimCoord(pts, standard_name='latitude', units='degrees', coord_system=lat_lon_coord_system)
            lat_coord.guess_bounds()

            # add the latitude and longitude coordinates to the cube, with mappings to data dimensions
            cube.add_dim_coord(lat_coord, 0)
            cube.add_dim_coord(lon_coord, 1)

            # implement standard iris callback capability. Although callbacks are not used in this example, the standard
            # mechanism for a custom loader to implement a callback is shown:
            cube = iris.io.run_callback(callback, cube, [header, field_headings, data_array], filename)

            # yield the cube created (the loop will continue when the next() element is requested)
            yield cube
Example #8
def lon_lat_coords(self, lons, lats, cs=None):
    if cs is None:
        cs = self.geog_cs
    return (
        coords.AuxCoord(lons,
                        "longitude",
                        units="degrees",
                        coord_system=cs),
        coords.AuxCoord(lats, "latitude", units="degrees",
                        coord_system=cs),
    )
Example #9
File: stock.py Project: mcmweb80/iris
def hybrid_height():
    """
    Returns a two-dimensional (Z, X) hybrid-height cube.

    >>> print hybrid_height()
    TODO: Update!
    air_temperature                     (level_height: 3; *ANONYMOUS*: 4)
         Dimension coordinates:
              level_height                           x               -
         Auxiliary coordinates:
              model_level_number                     x               -
              sigma                                  x               -
              surface_altitude                       -               x
         Derived coordinates:
              altitude                               x               x

    >>> print hybrid_height().data
    [[ 0  1  2  3]
     [ 4  5  6  7]
     [ 8  9 10 11]]

    """
    data = np.arange(12, dtype='i8').reshape((3, 4))

    orography = icoords.AuxCoord([10, 25, 50, 5],
                                 standard_name='surface_altitude',
                                 units='m')
    model_level = icoords.AuxCoord([2, 1, 0],
                                   standard_name='model_level_number')
    level_height = icoords.DimCoord([100, 50, 10],
                                    long_name='level_height',
                                    units='m',
                                    attributes={'positive': 'up'},
                                    bounds=[[150, 75], [75, 20], [20, 0]])
    sigma = icoords.AuxCoord([0.8, 0.9, 0.95],
                             long_name='sigma',
                             bounds=[[0.7, 0.85], [0.85, 0.97], [0.97, 1.0]])
    hybrid_height = iris.aux_factory.HybridHeightFactory(
        level_height, sigma, orography)

    cube = iris.cube.Cube(data,
                          standard_name='air_temperature',
                          units='K',
                          dim_coords_and_dims=[(level_height, 0)],
                          aux_coords_and_dims=[(orography, 1),
                                               (model_level, 0), (sigma, 0)],
                          aux_factories=[hybrid_height])
    return cube
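A brief check of the derived coordinate (hedged sketch): the HybridHeightFactory above derives altitude as level_height + sigma * surface_altitude, giving a 2-D (Z, X) coordinate.

cube = hybrid_height()
altitude = cube.coord('altitude')
print(altitude.shape)         # (3, 4)
print(altitude.points[0, 0])  # 100 + 0.8 * 10 = 108.0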
Example #10
    def setup(self):
        self.cube_list = cube.CubeList()
        for i in np.arange(2):
            i_cube = general_cube.copy()
            i_coord = coords.AuxCoord([i])
            i_cube.add_aux_coord(i_coord)
            self.cube_list.append(i_cube)
Example #11
    def setup(self):
        self.cube_a = general_cube.copy()
        self.cube_b = general_cube.copy()

        aux_coord = coords.AuxCoord(data_1d)
        self.cube_a.add_aux_coord(aux_coord, 0)
        self.cube_b.add_aux_coord(aux_coord, 1)
Example #12
def lagged_ensemble_callback(cube, field, filename):
    # Add our own realization coordinate if it doesn't already exist.
    if not cube.coords('realization'):
        realization = np.int32(filename[-6:-3])
        ensemble_coord = icoords.AuxCoord(realization,
                                          standard_name='realization')
        cube.add_aux_coord(ensemble_coord)
Example #13
def my_callback(cube, field, filename):

    """
    Function to:
     * Add an ensemble coordinate
     * Rename cubes
     * Remove unwanted coordinates
    """

    unwanted_keys = ['Number of field cols',
                     'Number of preliminary cols',
                     'Run time',
                     'Met data']
    for key in unwanted_keys:
        if key in cube.attributes:
            del cube.attributes[key]

    if cube.long_name in _MET_SHORT_NAME:
        cube.attributes['short_name'] = _MET_SHORT_NAME[cube.long_name]
        cube.rename(cube.attributes['short_name'])

    if not cube.coords('realization'):
        # Note: str.strip() removes a set of characters, not a suffix, so
        # split the extension off explicitly before taking the last token.
        ensemble_number = filename.rsplit('.', 1)[0].split('_')[-1]
        realization = ensemble_number[1:]
        ensemble_coord = icoords.AuxCoord(realization,
                                          standard_name='realization')
        cube.add_aux_coord(ensemble_coord)
Example #14
File: util.py Project: tangming331/baspy
def add_scalar_coords(cube, coord_dict=None):
    '''
    Useful for adding, e.g., longitude and latitude to a 1D time series cube at a specific location

    coord_dict needs to be a dictionary of the form:
        {'key1': ['scalar1', 'units1'], 'key2': ['scalar2', 'units2']}

    '''

    if (type(coord_dict) != dict):
        raise ValueError("coord_dict needs to be a dictionary of the form: \
                            {'key1': ['scalar1', 'units1'], 'key2': ['scalar2', 'units2']}"
                         )

    import iris.coords as coords

    for key, values in coord_dict.items():

        if not isinstance(values, list) or len(values) != 2:
            raise ValueError(
                "coord_dict must be of the form: {'key1': ['scalar1', 'units1'], 'key2': ['scalar2', 'units2']}"
            )

        scalar = values[0]
        units = values[1]
        new_coord = coords.AuxCoord(scalar, long_name=key, units=units)
        cube.add_aux_coord(new_coord)

    return cube
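A minimal usage sketch (hedged: the file name and coordinate values are illustrative, not part of the original snippet):

import iris

# Hypothetical 1D time-series cube for a single station, with no
# horizontal coordinates of its own.
ts_cube = iris.load_cube('station_timeseries.nc')
ts_cube = add_scalar_coords(
    ts_cube,
    coord_dict={'longitude': [-1.25, 'degrees'],
                'latitude': [51.75, 'degrees']})
print(ts_cube.coord('latitude'))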
Example #15
def main():
    fname = iris.sample_data_path("colpex.pp")

    # The list of phenomena of interest
    phenomena = ["air_potential_temperature", "air_pressure"]

    # Define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]

    air_potential_temperature, air_pressure = iris.load_cubes(
        fname, constraints
    )

    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name="P0", units="hPa")
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)

    # Calculate Exner pressure, (p / p0) ** (R / c_p),
    # with R = 287.05 and c_p = 1005.0 J kg-1 K-1.
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename("exner_pressure")

    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename("air_temperature")

    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(
        exner_pressure,
        air_temperature,
        coords=["grid_latitude", "grid_longitude"],
    )

    # For the purposes of this example, we only want to demonstrate the first
    # plot.
    lat_lon_slice_pairs = [next(lat_lon_slice_pairs)]

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        iplt.show()
Example #16
    def test_boundmode_multidim(self):
        # Test exception translation.
        # We can't get contiguous bounded grids from multi-d coords.
        cube = self.bounded_cube
        cube.remove_coord("latitude")
        cube.add_aux_coord(coords.AuxCoord(points=cube.data,
                                           standard_name='latitude',
                                           units='degrees'), [0, 1])
        with self.assertRaises(ValueError):
            iplt.pcolormesh(cube, coords=['longitude', 'latitude'])
Example #17
def cop_metadata_callback(cube, field, filename):
    """ A function which adds an "Experiment" coordinate which comes from the filename. """

    # Extract the experiment name (such as a1b or e1) from the filename (in this case it is just the parent folder's name)
    containing_folder = os.path.dirname(filename)
    experiment_label = os.path.basename(containing_folder)

    # Create a coordinate with the experiment label in it
    exp_coord = coords.AuxCoord(experiment_label, long_name='Experiment', units='no_unit')

    # and add it to the cube
    cube.add_aux_coord(exp_coord)
Example #18
    def setup(self):
        repeat_number = 10
        repeat_range = range(int(ARTIFICIAL_DIM_SIZE / repeat_number))
        array_repeat = np.repeat(repeat_range, repeat_number)
        array_unique = np.arange(len(array_repeat))

        coord_repeat = coords.AuxCoord(points=array_repeat, long_name="repeat")
        coord_unique = coords.DimCoord(points=array_unique, long_name="unique")

        local_cube = general_cube.copy()
        local_cube.add_aux_coord(coord_repeat, 0)
        local_cube.add_dim_coord(coord_unique, 0)
        self.cube = local_cube
Example #19
def add_aux_coordinates(filename, variable, variable_cube, variable_dict,
                        coord_dict, domain, **kwargs):
    from iris import load_cube, coords
    coord_system=None


    latitude = load_cube(filename, 'GLAT').core_data()
    longitude = load_cube(filename, 'GLON').core_data()
    lat_coord = coords.AuxCoord(latitude, standard_name='latitude', long_name='latitude',
                                var_name='latitude', units='degrees', coord_system=coord_system)
    lon_coord = coords.AuxCoord(longitude, standard_name='longitude', long_name='longitude',
                                var_name='longitude', units='degrees', coord_system=coord_system)

    if variable_dict[variable] == 3:
        variable_cube.add_aux_coord(lon_coord, (1, 2))
        variable_cube.add_aux_coord(lat_coord, (1, 2))
    elif variable_dict[variable] == 2:
        variable_cube.add_aux_coord(lon_coord, (0, 1))
        variable_cube.add_aux_coord(lat_coord, (0, 1))

    # add_coordinates=kwargs.pop('add_coordinates')
    # if type(add_coordinates)!=list:
    #     add_coordinates1=add_coordinates
    #     add_coordinates=[]
    #     add_coordinates.append(add_coordinates1)
    # for coordinate in add_coordinates:
    #     if coordinate=='latlon': 
    #         latitude=load_cube(filename,'GLAT').data
    #         longitude=load_cube(filename,'GLON').data
    #         lat_coord=coords.AuxCoord(latitude, standard_name='latitude', long_name='latitude', var_name='latitude', units='degrees', bounds=None, attributes=None, coord_system=coord_system)
    #         lon_coord=coords.AuxCoord(longitude, standard_name='longitude', long_name='longitude', var_name='longitude', units='degrees', bounds=None, attributes=None, coord_system=coord_system)

    #         if (variable_dict[variable]==3):                
    #             variable_cube.add_aux_coord(lon_coord,(1,2))
    #             variable_cube.add_aux_coord(lat_coord,(1,2))
    #         elif (variable_dict[variable]==2):
    #             variable_cube.add_aux_coord(lon_coord,(0,1))                
    #             variable_cube.add_aux_coord(lat_coord,(0,1))

    return variable_cube
Example #20
def main():
    fname = iris.sample_data_path('colpex.pp')

    # the list of phenomena of interest
    phenomena = ['air_potential_temperature', 'air_pressure']

    # define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]

    air_potential_temperature, air_pressure = iris.load_strict(
        fname, constraints)

    # define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(100000, long_name='P0', units='Pa')

    # calculate Exner pressure
    exner_pressure = (air_pressure / p0)**(287.05 / 1005.0)
    # set the standard name (the unit is scalar)
    exner_pressure.rename('exner_pressure')

    # calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # set phenomenon definition and unit
    air_temperature.standard_name = 'air_temperature'
    air_temperature.units = 'K'

    # Now create an iterator which will give us lat lon slices of exner pressure and air temperature in
    # the form [exner_slice, air_temp_slice]
    lat_lon_slice_pairs = itertools.izip(
        exner_pressure.slices(['grid_latitude', 'grid_longitude']),
        air_temperature.slices(['grid_latitude', 'grid_longitude']))
    plt.figure(figsize=(8, 4))

    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to overlap. Therefore, limit the number of ticks
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        plt.show()

        # For the purposes of this example, break after the first loop - we only want to demonstrate the first plot
        break
Example #21
    def setup(self):
        self.coord_name = "test"
        coord_bounds = np.array([data_1d - 1, data_1d + 1]).transpose()
        aux_coord = coords.AuxCoord(
            long_name=self.coord_name,
            points=data_1d,
            bounds=coord_bounds,
            units="days since 1970-01-01",
            climatological=True,
        )

        # Variables needed by the ComponentCommon base class.
        self.cube_kwargs = {"aux_coords_and_dims": [(aux_coord, 0)]}
        self.add_method = cube.Cube.add_aux_coord
        self.add_args = (aux_coord, (0))

        self.setup_common()
Example #22
def cop_metadata_callback(cube, field, filename):
    """
    A function which adds an "Experiment" coordinate which comes from the
    filename.
    """

    # Extract the experiment name (such as A1B or E1) from the filename (in
    # this case it is just the start of the file name, before the first ".").
    fname = os.path.basename(filename)  # filename without path.
    experiment_label = fname.split(".")[0]

    # Create a coordinate with the experiment label in it...
    exp_coord = coords.AuxCoord(experiment_label,
                                long_name="Experiment",
                                units="no_unit")

    # ...and add it to the cube.
    cube.add_aux_coord(exp_coord)
Example #23
    def setup(self):
        coord = coords.AuxCoord(points=data_1d, units="m")
        self.hybrid_factory = aux_factory.HybridHeightFactory(delta=coord)

        # Variables needed by the ComponentCommon base class.
        self.cube_kwargs = {
            "aux_coords_and_dims": [(coord, 0)],
            "aux_factories": [self.hybrid_factory],
        }

        self.setup_common()

        # Variables needed by the overridden time_add benchmark in this subclass.
        cube_w_coord = self.cube.copy()
        for factory in cube_w_coord.aux_factories:
            cube_w_coord.remove_aux_factory(factory)
        self.cube_w_coord = cube_w_coord
Example #24
def callback(cube, field, filename):
    """
    Adds useful auxiliary coordinates to the cube

    """

    global __current_dataset

    filename = re.split('/', filename)[-1]
    split_str = re.split('_', filename)

    filename_structure = dataset_dictionaries[__current_dataset][
        'FilenameStructure']
    long_names = filename_structure.split('_')

    whitelist_auxcoords = ['Experiment', 'Model', 'RunID']

    for i in range(0, len(split_str)):
        if long_names[i] in whitelist_auxcoords:
            new_coord = coords.AuxCoord(split_str[i],
                                        long_name=long_names[i],
                                        units='no_unit')
            cube.add_aux_coord(new_coord)

    ### Add additional time coordinate categorisations
    if (len(cube.coords(axis='t')) > 0):
        time_name = cube.coord(axis='t').var_name
        iris.coord_categorisation.add_year(cube, time_name, name='year')
        iris.coord_categorisation.add_month_number(cube,
                                                   time_name,
                                                   name='month')
        seasons = ['djf', 'mam', 'jja', 'son']
        iris.coord_categorisation.add_season(cube,
                                             time_name,
                                             name='clim_season',
                                             seasons=seasons)
        iris.coord_categorisation.add_season_year(cube,
                                                  time_name,
                                                  name='season_year',
                                                  seasons=seasons)
Example #25
File: stock.py Project: mcmweb80/iris
def realistic_4d():
    """
    Returns a realistic 4d cube.

    >>> print repr(realistic_4d())
    <iris 'Cube' of air_potential_temperature (time: 6; model_level_number: 70; grid_latitude: 100; grid_longitude: 100)>

    """
    # the stock arrays were created in Iris 0.8 with:
    #    >>> fname = iris.sample_data_path('PP', 'COLPEX', 'theta_and_orog_subset.pp')
    #    >>> theta = iris.load_cube(fname, 'air_potential_temperature')
    #    >>> for coord in theta.coords():
    #    ...  print coord.name, coord.has_points(), coord.has_bounds(), coord.units
    #    ...
    #    grid_latitude True True degrees
    #    grid_longitude True True degrees
    #    level_height True True m
    #    model_level True False 1
    #    sigma True True 1
    #    time True False hours since 1970-01-01 00:00:00
    #    source True False no_unit
    #    forecast_period True False hours
    #    >>> arrays = []
    #    >>> for coord in theta.coords():
    #    ...  if coord.has_points(): arrays.append(coord.points)
    #    ...  if coord.has_bounds(): arrays.append(coord.bounds)
    #    >>> arrays.append(theta.data)
    #    >>> arrays.append(theta.coord('sigma').coord_system.orography.data)
    #    >>> np.savez('stock_arrays.npz', *arrays)

    data_path = os.path.join(os.path.dirname(__file__), 'stock_arrays.npz')
    r = np.load(data_path)
    # sort the arrays based on the order they were originally given. The names given are of the form 'arr_1' or 'arr_10'
    _, arrays = zip(*sorted(r.iteritems(), key=lambda item: int(item[0][4:])))

    lat_pts, lat_bnds, lon_pts, lon_bnds, level_height_pts, \
    level_height_bnds, model_level_pts, sigma_pts, sigma_bnds, time_pts, \
    _source_pts, forecast_period_pts, data, orography = arrays

    ll_cs = RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))

    lat = icoords.DimCoord(lat_pts,
                           standard_name='grid_latitude',
                           units='degrees',
                           bounds=lat_bnds,
                           coord_system=ll_cs)
    lon = icoords.DimCoord(lon_pts,
                           standard_name='grid_longitude',
                           units='degrees',
                           bounds=lon_bnds,
                           coord_system=ll_cs)
    level_height = icoords.DimCoord(level_height_pts,
                                    long_name='level_height',
                                    units='m',
                                    bounds=level_height_bnds,
                                    attributes={'positive': 'up'})
    model_level = icoords.DimCoord(model_level_pts,
                                   standard_name='model_level_number',
                                   units='1',
                                   attributes={'positive': 'up'})
    sigma = icoords.AuxCoord(sigma_pts,
                             long_name='sigma',
                             units='1',
                             bounds=sigma_bnds)
    orography = icoords.AuxCoord(orography,
                                 standard_name='surface_altitude',
                                 units='m')
    time = icoords.DimCoord(time_pts,
                            standard_name='time',
                            units='hours since 1970-01-01 00:00:00')
    forecast_period = icoords.DimCoord(forecast_period_pts,
                                       standard_name='forecast_period',
                                       units='hours')

    hybrid_height = iris.aux_factory.HybridHeightFactory(
        level_height, sigma, orography)

    cube = iris.cube.Cube(data,
                          standard_name='air_potential_temperature',
                          units='K',
                          dim_coords_and_dims=[(time, 0), (model_level, 1),
                                               (lat, 2), (lon, 3)],
                          aux_coords_and_dims=[(orography, (2, 3)),
                                               (level_height, 1), (sigma, 1),
                                               (forecast_period, None)],
                          attributes={'source': 'Iris test case'},
                          aux_factories=[hybrid_height])
    return cube
Example #26
File: __init__.py Project: wilbertcs/iris
def realistic_4d():
    """
    Returns a realistic 4d cube.

    >>> print(repr(realistic_4d()))
    <iris 'Cube' of air_potential_temperature (time: 6; model_level_number: 70;
    grid_latitude: 100; grid_longitude: 100)>

    """
    data_path = tests.get_data_path(("stock", "stock_arrays.npz"))
    if not os.path.isfile(data_path):
        raise IOError("Test data is not available at {}.".format(data_path))
    r = np.load(data_path)
    # sort the arrays based on the order they were originally given.
    # The names given are of the form 'arr_1' or 'arr_10'
    _, arrays = zip(*sorted(r.items(), key=lambda item: int(item[0][4:])))

    (
        lat_pts,
        lat_bnds,
        lon_pts,
        lon_bnds,
        level_height_pts,
        level_height_bnds,
        model_level_pts,
        sigma_pts,
        sigma_bnds,
        time_pts,
        _source_pts,
        forecast_period_pts,
        data,
        orography,
    ) = arrays

    ll_cs = RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))

    lat = icoords.DimCoord(
        lat_pts,
        standard_name="grid_latitude",
        units="degrees",
        bounds=lat_bnds,
        coord_system=ll_cs,
    )
    lon = icoords.DimCoord(
        lon_pts,
        standard_name="grid_longitude",
        units="degrees",
        bounds=lon_bnds,
        coord_system=ll_cs,
    )
    level_height = icoords.DimCoord(
        level_height_pts,
        long_name="level_height",
        units="m",
        bounds=level_height_bnds,
        attributes={"positive": "up"},
    )
    model_level = icoords.DimCoord(
        model_level_pts,
        standard_name="model_level_number",
        units="1",
        attributes={"positive": "up"},
    )
    sigma = icoords.AuxCoord(sigma_pts,
                             long_name="sigma",
                             units="1",
                             bounds=sigma_bnds)
    orography = icoords.AuxCoord(orography,
                                 standard_name="surface_altitude",
                                 units="m")
    time = icoords.DimCoord(time_pts,
                            standard_name="time",
                            units="hours since 1970-01-01 00:00:00")
    forecast_period = icoords.DimCoord(forecast_period_pts,
                                       standard_name="forecast_period",
                                       units="hours")

    hybrid_height = iris.aux_factory.HybridHeightFactory(
        level_height, sigma, orography)

    cube = iris.cube.Cube(
        data,
        standard_name="air_potential_temperature",
        units="K",
        dim_coords_and_dims=[(time, 0), (model_level, 1), (lat, 2), (lon, 3)],
        aux_coords_and_dims=[
            (orography, (2, 3)),
            (level_height, 1),
            (sigma, 1),
            (forecast_period, None),
        ],
        attributes={"source": "Iris test case"},
        aux_factories=[hybrid_height],
    )
    return cube
Example #27
def NAME_to_cube(filenames, callback):
    """
    Returns a generator of cubes given a list of filenames and a callback.
    """

    for filename in filenames:
        header, column_headings, data_arrays = load_NAME_III(filename)

        for i, data_array in enumerate(data_arrays):
            # turn the dictionary of column headers with a list of header
            # information for each field into a dictionary of headers for just
            # this field. Ignore the first 4 columns of grid position (data was
            # located with the data array).
            field_headings = dict(
                (k, v[i + 4]) for k, v in column_headings.items())

            # make a cube
            cube = iris.cube.Cube(data_array)

            # define the name and unit
            name = "%s %s" % (
                field_headings["species"],
                field_headings["quantity"],
            )
            name = name.upper().replace(" ", "_")
            cube.rename(name)
            # Some units are badly encoded in the file, fix this by putting a
            # space in between. (if gs is not found, then the string will be
            # returned unchanged)
            cube.units = field_headings["unit"].replace("gs", "g s")

            # define and add the singular coordinates of the field (flight
            # level, time etc.)
            cube.add_aux_coord(
                icoords.AuxCoord(
                    field_headings["z_level"],
                    long_name="flight_level",
                    units="1",
                ))

            # define the time unit and use it to serialise the datetime for the
            # time coordinate
            time_unit = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN)
            time_coord = icoords.AuxCoord(
                time_unit.date2num(field_headings["time"]),
                standard_name="time",
                units=time_unit,
            )
            cube.add_aux_coord(time_coord)

            # build a coordinate system which can be referenced by latitude and
            # longitude coordinates
            lat_lon_coord_system = icoord_systems.GeogCS(6371229)

            # build regular latitude and longitude coordinates which have
            # bounds
            start = header["X grid origin"] + header["X grid resolution"]
            step = header["X grid resolution"]
            count = header["X grid size"]
            pts = start + np.arange(count, dtype=np.float32) * step
            lon_coord = icoords.DimCoord(
                pts,
                standard_name="longitude",
                units="degrees",
                coord_system=lat_lon_coord_system,
            )
            lon_coord.guess_bounds()

            start = header["Y grid origin"] + header["Y grid resolution"]
            step = header["Y grid resolution"]
            count = header["Y grid size"]
            pts = start + np.arange(count, dtype=np.float32) * step
            lat_coord = icoords.DimCoord(
                pts,
                standard_name="latitude",
                units="degrees",
                coord_system=lat_lon_coord_system,
            )
            lat_coord.guess_bounds()

            # add the latitude and longitude coordinates to the cube, with
            # mappings to data dimensions
            cube.add_dim_coord(lat_coord, 0)
            cube.add_dim_coord(lon_coord, 1)

            # implement standard iris callback capability. Although callbacks
            # are not used in this example, the standard mechanism for a custom
            # loader to implement a callback is shown:
            cube = iris.io.run_callback(callback, cube,
                                        [header, field_headings, data_array],
                                        filename)

            # yield the cube created (the loop will continue when the next()
            # element is requested)
            yield cube
Example #28
    def create(self):
        return coords.AuxCoord(**self.create_kwargs)
Example #29
    def setup(self):
        data_1d = np.zeros(ARTIFICIAL_DIM_SIZE)
        self.coord = coords.AuxCoord(points=data_1d, units="m")

        self.setup_common()
Example #30
def realistic_4d():
    """
    Returns a realistic 4d cube.

    >>> print(repr(realistic_4d()))
    <iris 'Cube' of air_potential_temperature (time: 6; model_level_number: 70;
    grid_latitude: 100; grid_longitude: 100)>

    """
    data_path = tests.get_data_path(('stock', 'stock_arrays.npz'))
    if not os.path.isfile(data_path):
        raise IOError('Test data is not available at {}.'.format(data_path))
    r = np.load(data_path)
    # sort the arrays based on the order they were originally given.
    # The names given are of the form 'arr_1' or 'arr_10'
    _, arrays = zip(*sorted(r.items(), key=lambda item: int(item[0][4:])))

    lat_pts, lat_bnds, lon_pts, lon_bnds, level_height_pts, \
        level_height_bnds, model_level_pts, sigma_pts, sigma_bnds, time_pts, \
        _source_pts, forecast_period_pts, data, orography = arrays

    ll_cs = RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0))

    lat = icoords.DimCoord(lat_pts,
                           standard_name='grid_latitude',
                           units='degrees',
                           bounds=lat_bnds,
                           coord_system=ll_cs)
    lon = icoords.DimCoord(lon_pts,
                           standard_name='grid_longitude',
                           units='degrees',
                           bounds=lon_bnds,
                           coord_system=ll_cs)
    level_height = icoords.DimCoord(level_height_pts,
                                    long_name='level_height',
                                    units='m',
                                    bounds=level_height_bnds,
                                    attributes={'positive': 'up'})
    model_level = icoords.DimCoord(model_level_pts,
                                   standard_name='model_level_number',
                                   units='1',
                                   attributes={'positive': 'up'})
    sigma = icoords.AuxCoord(sigma_pts,
                             long_name='sigma',
                             units='1',
                             bounds=sigma_bnds)
    orography = icoords.AuxCoord(orography,
                                 standard_name='surface_altitude',
                                 units='m')
    time = icoords.DimCoord(time_pts,
                            standard_name='time',
                            units='hours since 1970-01-01 00:00:00')
    forecast_period = icoords.DimCoord(forecast_period_pts,
                                       standard_name='forecast_period',
                                       units='hours')

    hybrid_height = iris.aux_factory.HybridHeightFactory(
        level_height, sigma, orography)

    cube = iris.cube.Cube(data,
                          standard_name='air_potential_temperature',
                          units='K',
                          dim_coords_and_dims=[(time, 0), (model_level, 1),
                                               (lat, 2), (lon, 3)],
                          aux_coords_and_dims=[(orography, (2, 3)),
                                               (level_height, 1), (sigma, 1),
                                               (forecast_period, None)],
                          attributes={'source': 'Iris test case'},
                          aux_factories=[hybrid_height])
    return cube