Example #1
def fix_bpch2nc(cube, field, filename):
    """
    An Iris load callback for properly loading the NetCDF files
    created by BPCH2NC (GAMAP v2-12+).

    """
    global _coordcache

    # units
    units = field.unit.strip()
    try:
        cube.units = units
    except ValueError:
        # Try to get equivalent units compatible with udunits.
        # Store original unit as cube attribute
        conform_units = ctm2cf.get_cfcompliant_units(units)
        try:
            cube.units = conform_units
        except ValueError:
            warnings.warn("Invalid udunits2 '{0}'".format(units))
    cube.attributes["ctm_units"] = units

    # a hack to keep the cube's long_name but show var_name in the cube summary
    iris.std_names.STD_NAMES[cube.var_name] = {'canonical_units': cube.units}
    cube.standard_name = cube.var_name

    # add spatial coordinates
    modelname = cube.attributes.get('Model')
    res = cube.attributes.get('Delta_Lon'), cube.attributes.get('Delta_Lat')
    nlayers = cube.attributes.get('NLayers')
    cache_key = modelname, res, nlayers

    if _coordcache.get(cache_key) is None:
        ctm_grid = grid.CTMGrid.from_model(modelname, resolution=res)
        coord_names = ('longitude', 'latitude', 'levels')
        lon, lat, lev = irisutil.coord_from_grid(ctm_grid, coord=coord_names)
        _coordcache[cache_key] = {'lon': lon, 'lat': lat, 'lev': lev}
    if cube.ndim == 3:
        cube.add_dim_coord(_coordcache[cache_key]['lon'], 2)
        cube.add_dim_coord(_coordcache[cache_key]['lat'], 1)
        if cube.shape[0] == nlayers:
            cube.add_dim_coord(_coordcache[cache_key]['lev'], 0)

    # add time scalar coordinate (get info from global attributes)
    tstart = (str(cube.attributes['Start_Date']) +
              str(cube.attributes['Start_Time']))
    tstart = datetime.datetime.strptime(tstart, "%Y%m%d%H")
    tstart = timeutil.time2tau(tstart)
    tend = (str(cube.attributes['End_Date']) +
            str(cube.attributes['End_Time']))
    tend = datetime.datetime.strptime(tend, "%Y%m%d%H")
    tend = timeutil.time2tau(tend)
    time_coord = iris.coords.DimCoord(points=tstart,
                                      bounds=(tstart, tend),
                                      standard_name='time',
                                      units=irisutil.CTM_TIME_UNIT_IRIS)
    cube.add_aux_coord(time_coord)

    # attributes
    # TODO: don't remove all attributes
    cube.attributes.clear()

    # handle 2D fields (remove 1-sized 1st dimension)
    if cube.ndim == 3 and cube.shape[0] == 1:
        #dummy_coord = iris.coords.DimCoord([0], long_name='dummy',
        #                                   bounds=[0, 1])
        #cube.add_dim_coord(dummy_coord, 0)
        return next(cube.slices(range(1, cube.ndim)))
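
Since fix_bpch2nc follows the standard Iris load-callback signature (cube, field, filename), it can be passed straight to iris.load. A minimal usage sketch; the file name is a placeholder, not a real path:

import iris

# 'ctm_bpch2nc.nc' is a hypothetical path standing in for any BPCH2NC-produced NetCDF file.
cubes = iris.load('ctm_bpch2nc.nc', callback=fix_bpch2nc)
print(cubes)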
Example #2
def fix_bpch2coards(cube, field, filename):
    """
    An Iris load callback for properly loading the NetCDF files
    created by BPCH2COARDS (GAMAP v2-17+).

    """
    global _coordcache2

    # units
    units = field.units
    try:
        cube.units = units
    except ValueError:
        # Try to get equivalent units compatible with udunits.
        # Store original unit as cube attribute
        conform_units = ctm2cf.get_cfcompliant_units(units)
        try:
            cube.units = conform_units
        except ValueError:
            warnings.warn("Invalid udunits2 '{0}'".format(units))
    cube.attributes["ctm_units"] = units

    # a hack to keep the cube's long_name but show var_name in the cube summary
    iris.std_names.STD_NAMES[cube.var_name] = {'canonical_units': cube.units}
    cube.standard_name = cube.var_name

    # attributes
    # TODO: don't remove all attributes
    cube.attributes.clear()

    # longitude coordinate (not strictly monotonic): degrees -> degrees_east
    try:
        lon = cube.coord('longitude')
        lon_dim = cube.coord_dims(lon)[0]
        cache_key = 'longitude', filename

        if _coordcache2.get(cache_key) is None:
            west_ind = np.nonzero(lon.points >= 180.)
            lon.points[west_ind] = -1. * (360. - lon.points[west_ind])
            lon.units = 'degrees_east'
            _coordcache2[cache_key] = iris.coords.DimCoord.from_coord(lon)

        cube.remove_coord(lon)
        cube.add_dim_coord(_coordcache2[cache_key], lon_dim)
    except iris.exceptions.CoordinateNotFoundError:
        pass

    # levels coordinate
    # 'sigma_level' is deprecated in the CF standard (not supported by UDUNITS)
    try:
        lev = cube.coord('Eta Centers')
        lev_dim = cube.coord_dims(lev)[0]
        lev_std_name = 'atmosphere_hybrid_sigma_pressure_coordinate'
        cache_key = lev_std_name, filename

        if _coordcache2.get(cache_key) is None:
            lev.standard_name = lev_std_name
            lev.units = iris.unit.Unit('1')
            d = nc.Dataset(filename)
            elev = d.variables['edge'][:]
            lev.bounds = np.column_stack((elev[:-1], elev[1:]))
            _coordcache2[cache_key] = iris.coords.DimCoord.from_coord(lev)

        cube.remove_coord(lev)
        cube.add_dim_coord(_coordcache2[cache_key], lev_dim)
    except iris.exceptions.CoordinateNotFoundError:
        pass

    # time: dimension -> scalar coordinate (+ add bounds)
    try:
        time_coord = cube.coord('time')
        time_dim = cube.coord_dims(time_coord)[0]

        with iris.FUTURE.context(cell_datetime_objects=True):
            tstart = time_coord.cell(0).point
        delta_t = time_coord.attributes.pop('delta_t')
        tend = tstart + timeutil.strp_relativedelta(delta_t)
        time_coord.bounds = [timeutil.time2tau(tstart),
                             timeutil.time2tau(tend)]
        if cube.shape[time_dim] == 1:
            slices_dims = [d for d in range(cube.ndim) if d != time_dim]
            return next(cube.slices(slices_dims))
    except iris.exceptions.CoordinateNotFoundError:
        pass
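
Both callbacks rely on module-level cache dicts accessed via `global`. A minimal sketch of how those caches might be set up at module scope; the plain-dict initialisation is an assumption consistent with the .get() calls above, and the reset helper is hypothetical:

# Module-level caches shared across callback calls: keyed by
# (model, resolution, nlayers) in fix_bpch2nc and by
# (coordinate name, filename) in fix_bpch2coards.
_coordcache = {}
_coordcache2 = {}


def _clear_coord_caches():
    """Hypothetical helper: empty both caches, e.g. between unrelated loads."""
    _coordcache.clear()
    _coordcache2.clear()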
Example #3
def _datablock_to_cube(datablock, dim_coords_and_dims=None,
                       aux_coords_and_dims=None, aux_factories=None,
                       coords_from_model=True, errcoord='error',
                       **kwargs):
    """
    Create a :class:`iris.cube.Cube` object given a datablock dict.
    """
    # TODO: cell methods, aux_factories

    # Create cube with data or data proxy, but no metadata
    if isinstance(datablock['data'], bpch.BPCHDataProxy):
        data = biggus.OrthoArrayAdapter(datablock['data'])
    else:
        data = datablock['data']
    cube = iris.cube.Cube(data)

    # units
    units = datablock['unit'].strip()
    # Try to get equivalent units compatible with udunits.
    # Store original unit as cube attribute
    conform_units = ctm2cf.get_cfcompliant_units(units)
    try:
        cube.units = conform_units
    except ValueError:
        warnings.warn("Invalid udunits2 '{0}'".format(units))
    cube.attributes["ctm_units"] = units

    # set the cube's name ('category__name')
    name = "__".join([datablock['category'], datablock['name']])
    name = ctm2cf.get_valid_varname(name)
    # a hack to keep the cube's long_name but show var_name in the cube summary
    iris.std_names.STD_NAMES[name] = {'canonical_units': cube.units}
    cube.standard_name = name
    cube.var_name = name
    cube.long_name = datablock['tracerinfo'].get('full_name')

    # set coordinates from datablock metadata
    if coords_from_model:
        ctm_grid = grid.CTMGrid.from_model(datablock['modelname'],
                                           resolution=datablock['resolution'])
        coord_names = ('longitude', 'latitude', 'levels')
        lon, lat, lev = coord_from_grid(ctm_grid, coord=coord_names)
        cube.add_dim_coord(lon, 0)
        cube.add_dim_coord(lat, 1)
        if cube.ndim > 2:
            add_dim_aux_coord(cube, lev, 2, err='pass')

    # add given dimension coordinates if any
    if dim_coords_and_dims:
        for coord, dim in dim_coords_and_dims:
            add_dim_aux_coord(cube, coord, dim, err=errcoord)

    # add given auxiliary coordinates if any
    if aux_coords_and_dims:
        for coord, dim in aux_coords_and_dims:
            add_dim_aux_coord(cube, coord, dim, err=errcoord)

    # add given aux coordinates factories if any
    if aux_factories:
        for aux_factory in aux_factories:
            cube.add_aux_factory(aux_factory)

    # time coordinates
    point = timeutil.time2tau(datablock['times'][0])
    bounds = [timeutil.time2tau(t) for t in datablock['times']]
    time_coord = iris.coords.DimCoord(points=point,
                                      bounds=bounds,
                                      standard_name='time',
                                      units=CTM_TIME_UNIT_IRIS)
    cube.add_aux_coord(time_coord)

    # attributes
    # TODO: proper attribute handling (several issues)
    # 1. avoid duplicating the space domain information
    # 2. keep the metadata somewhere in its original format (for further export)
    # 3. deal with simple types only (e.g., tuple not valid as cube attribute)
    # 4. limit the number of cube attributes (slows down the cube merging)
    cube.attributes['model'] = datablock.get('modelname')

    #if isinstance(datablock['tracerinfo'], dict):
    #    cube.attributes.update(datablock['tracerinfo'])
    #    cube.attributes.pop('unit')
    #for attr_name in ('name', 'number', 'category', 'modelname', 'resolution'):
    #    cube.attributes[attr_name] = datablock.get(attr_name, None)
    cube.attributes.update(kwargs)

    return cube
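
A minimal sketch of the datablock dict that _datablock_to_cube expects, inferred from the keys the function reads; every value below is illustrative, not real GEOS-Chem output:

import datetime

import numpy as np

# Illustrative datablock with only the keys read by _datablock_to_cube.
datablock = {
    'data': np.zeros((72, 46, 47), dtype=np.float32),  # plain array, not a BPCHDataProxy
    'unit': 'ppbv ',
    'category': 'IJ-AVG-$',
    'name': 'O3',
    'tracerinfo': {'full_name': 'Ozone mixing ratio'},
    'modelname': 'GEOS5',
    'resolution': (5, 4),
    'times': (datetime.datetime(2013, 1, 1), datetime.datetime(2013, 2, 1)),
}

# coords_from_model=False skips the grid.CTMGrid lookup, so the sketch does
# not depend on 'modelname'/'resolution' matching a grid known to the library.
cube = _datablock_to_cube(datablock, coords_from_model=False)
print(cube)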