def _get_datablock_dim_coords(datablock, coord_cache):
    """
    Get a sequence of (:class:`iris.coords.DimCoord` object, dim)
    for `datablock`.

    `coord_cache` is a dictionary serving as a cache to save memory
    and CPU while assigning coordinates to the created iris cubes.
    """
    cache_fields = ('modelname', 'resolution', 'origin', 'shape')
    cache_key = '_'.join(str(datablock[f]) for f in cache_fields)
    # test membership on the dict itself rather than building an
    # intermediate `keys()` list
    if cache_key in coord_cache:
        return coord_cache[cache_key]

    ctm_grid = grid.CTMGrid.from_model(datablock['modelname'],
                                       resolution=datablock['resolution'])
    if datablock['origin'] != (1, 1, 1):
        origin, shape = datablock['origin'], datablock['shape']
        # pad a 2D shape with trailing zeros so it lines up with a
        # 3D origin before computing the index bounds
        shape3 = np.pad(shape, (0, len(origin) - len(shape)), 'constant')
        imin = np.array(origin) - 1
        imax = imin + np.array(shape3)
        region_box = zip(imin, imax)
    else:
        region_box = None

    lon, lat, lev = irisutil.coord_from_grid(ctm_grid, region_box=region_box)
    if len(datablock['shape']) == 2:
        dim_coords = [(lon, 0), (lat, 1)]
    else:
        dim_coords = [(lon, 0), (lat, 1), (lev, 2)]

    coord_cache[cache_key] = dim_coords
    return dim_coords
def fix_bpch2nc(cube, field, filename):
    """
    An Iris load callback for properly loading the NetCDF files
    created by BPCH2NC (GAMAP v2-12+).
    """
    global _coordcache

    # units
    units = field.unit.strip()
    try:
        cube.units = units
    except ValueError:
        # Try to get equivalent units compatible with udunits.
        # Store original unit as cube attribute
        conform_units = ctm2cf.get_cfcompliant_units(units)
        try:
            cube.units = conform_units
        except ValueError:
            warnings.warn("Invalid udunits2 '{0}'".format(units))
    cube.attributes["ctm_units"] = units

    # a hack for keeping cube's long_name but show var_name in cube summary
    iris.std_names.STD_NAMES[cube.var_name] = {'canonical_units': cube.units}
    cube.standard_name = cube.var_name

    # add spatial coordinates (computed once per grid and cached)
    modelname = cube.attributes.get('Model')
    res = cube.attributes.get('Delta_Lon'), cube.attributes.get('Delta_Lat')
    nlayers = cube.attributes.get('NLayers')
    cache_key = modelname, res, nlayers
    # plain membership test: cached entries are dicts, never None
    if cache_key not in _coordcache:
        ctm_grid = grid.CTMGrid.from_model(modelname, resolution=res)
        coord_names = ('longitude', 'latitude', 'levels')
        lon, lat, lev = irisutil.coord_from_grid(ctm_grid, coord=coord_names)
        _coordcache[cache_key] = {'lon': lon, 'lat': lat, 'lev': lev}
    if cube.ndim == 3:
        cube.add_dim_coord(_coordcache[cache_key]['lon'], 2)
        cube.add_dim_coord(_coordcache[cache_key]['lat'], 1)
        # only attach the vertical coordinate when the leading
        # dimension really spans all model layers
        if cube.shape[0] == nlayers:
            cube.add_dim_coord(_coordcache[cache_key]['lev'], 0)

    # add time scalar coordinate (get info from global attributes)
    tstart = (str(cube.attributes['Start_Date'])
              + str(cube.attributes['Start_Time']))
    tstart = datetime.datetime.strptime(tstart, "%Y%m%d%H")
    tstart = timeutil.time2tau(tstart)
    tend = (str(cube.attributes['End_Date'])
            + str(cube.attributes['End_Time']))
    tend = datetime.datetime.strptime(tend, "%Y%m%d%H")
    tend = timeutil.time2tau(tend)
    time_coord = iris.coords.DimCoord(points=tstart,
                                      bounds=(tstart, tend),
                                      standard_name='time',
                                      units=irisutil.CTM_TIME_UNIT_IRIS)
    cube.add_aux_coord(time_coord)

    # attributes
    # TODO: don't remove all attributes
    cube.attributes.clear()

    # handle 2D fields (remove 1-sized 1st dimension)
    if cube.ndim == 3 and cube.shape[0] == 1:
        #dummy_coord = iris.coords.DimCoord([0], long_name='dummy',
        #                                   bounds=[0, 1])
        #cube.add_dim_coord(dummy_coord, 0)
        # next() builtin instead of the Py2-only .next() method
        # (identical behavior on 2.6+, also works on Python 3)
        return next(cube.slices(range(1, cube.ndim)))
def cubes_from_bpch(filenames, callback=None, **kwargs):
    """
    Return a generator of Iris cubes from BPCH filenames.

    Parameters
    ----------
    filenames : string or sequence of strings
        (list of) BPCH filename(s) to load (can be UNIX expressions,
        e.g., '*.bpch').
    callback : func
        a function which can be passed on to :func:`iris.io.run_callback`
    **kwargs
        any extra keyword argument passed to
        :class:`pygchem.utils.uff.FortranFile` (e.g., `endian`).

    Notes
    -----
    The resultant cubes may not be in the same order as in the files.
    """
    if isinstance(filenames, StringTypes):
        filenames = [filenames]

    for filename in filenames:
        for path in glob.glob(filename):
            filetype, filetitle, datablocks = bpch.read_bpch(path, **kwargs)

            # skip files that contain no datablocks (avoids an
            # IndexError on the leading-datablock lookup below)
            if not datablocks:
                continue

            # assume that CTM Grid is the same for all datablocks
            # (compute coordinates once per file to save CPU/memory).
            leading_datablock = datablocks[0]
            ctm_grid = grid.CTMGrid.from_model(
                leading_datablock['modelname'],
                resolution=leading_datablock['resolution']
            )
            if leading_datablock['origin'] != (1, 1, 1):
                origin = leading_datablock['origin']
                shape = leading_datablock['shape']
                # pad a 2D shape with trailing zeros so it lines up
                # with a 3D origin (same treatment as in
                # _get_datablock_dim_coords; without it a 2D leading
                # datablock would raise a broadcast error)
                shape3 = np.pad(shape, (0, len(origin) - len(shape)),
                                'constant')
                imin = np.array(origin) - 1
                imax = imin + np.array(shape3)
                region_box = zip(imin, imax)
            else:
                region_box = None

            lon, lat, lev = irisutil.coord_from_grid(ctm_grid,
                                                     region_box=region_box)

            for datablock in datablocks:
                if len(datablock['shape']) == 2:
                    dim_coords = [(lon, 0), (lat, 1)]
                else:
                    # 3D default
                    dim_coords = [(lon, 0), (lat, 1), (lev, 2)]
                cube = irisutil._datablock_to_cube(
                    datablock,
                    dim_coords_and_dims=dim_coords,
                    coords_from_model=False,
                    errcoord='pass'
                )
                # add `filetitle` in the cube's attributes
                # (commented: may not properly concatenate cubes)
                #cube.attributes['bpch_title'] = filetitle
                if callback is not None:
                    cube = iris.io.run_callback(callback, cube,
                                                datablock, filename)
                if cube is None:
                    continue
                yield cube