def to_cube(self):
    """Return a new :class:`~iris.cube.Cube` from this ABFField."""
    cube = iris.cube.Cube(self.data)

    # Phenomenon name depends on whether this is an ABF or an ABL file.
    file_format = self.format.lower()
    if file_format == "abf":
        cube.rename("leaf_area_index")
    elif file_format == "abl":
        cube.rename("FAPAR")
    else:
        msg = "Unknown ABF/ABL format: {}".format(self.format)
        raise iris.exceptions.TranslationError(msg)
    cube.units = "%"

    # Regular global lat/lon grid of 1/12-degree cells; points are at
    # cell centres, so offset by half a step from the -180/-90 corner.
    step = 1.0 / 12.0
    llcs = GeogCS(semi_major_axis=6378137.0, semi_minor_axis=6356752.31424)
    x_coord = DimCoord(
        np.arange(X_SIZE) * step + (step / 2) - 180,
        standard_name="longitude",
        units="degrees",
        coord_system=llcs,
    )
    y_coord = DimCoord(
        np.arange(Y_SIZE) * step + (step / 2) - 90,
        standard_name="latitude",
        units="degrees",
        coord_system=llcs,
    )
    x_coord.guess_bounds()
    y_coord.guess_bounds()
    cube.add_dim_coord(x_coord, 1)
    cube.add_dim_coord(y_coord, 0)

    # Each month is split into period 'a' (1st-15th) and period 'b'
    # (16th to the end of the month).
    if self.period == "a":
        first_day, last_day = 1, 15
    elif self.period == "b":
        first_day = 16
        last_day = calendar.monthrange(self.year, self.month)[1]
    else:
        raise iris.exceptions.TranslationError(
            "Unknown period: {}".format(self.period))

    period_start = datetime.date(year=self.year, month=self.month,
                                 day=first_day)
    period_end = datetime.date(year=self.year, month=self.month,
                               day=last_day)

    # Convert to "days since 0001-01-01".
    # Iris will have proper datetime objects in the future.
    # This step will not be necessary.
    start_num = period_start.toordinal() - 1
    end_num = period_end.toordinal() - 1

    # TODO: Should we put the point in the middle of the period instead?
    cube.add_aux_coord(
        AuxCoord(start_num, standard_name="time",
                 units="days since 0001-01-01",
                 bounds=[start_num, end_num]))

    # TODO: Do they only come from Boston?
    cube.attributes["source"] = "Boston University"

    return cube
def setUp(self):
    # A 5-point array, masked above 3, wrapped as lazy cube data.
    masked = ma.masked_greater([1, 2, 3, 4, 5], 3)
    self.cube = Cube(as_lazy_data(masked))
    foo_coord = DimCoord([6, 7, 8, 9, 10], long_name="foo")
    self.cube.add_dim_coord(foo_coord, 0)
def test_additional_scalar_dimension(self):
    """Test accepts cube with single realization coordinate"""
    cube = self.valid.copy()
    cube.add_aux_coord(DimCoord(1, standard_name="realization"))
    # Must not raise for a cube carrying a scalar realization coord.
    check_input_coords(cube)
def test_lazy_core(self):
    # Lazy bounds are realised by DimCoord, so the coord reports no
    # lazy bounds afterwards.
    coord = DimCoord(self.pts_real, bounds=self.bds_lazy)
    self.assertFalse(coord.has_lazy_bounds())
def test_copy_array(self):
    # Assigning points creates a copy: mutating the source array
    # afterwards must not be visible through the coord.
    source = np.array([1, 2, 3])
    coord = DimCoord(source)
    source[1] = 5
    self.assertEqual(coord.points[1], 2)
def test_lazy_core(self):
    coord = DimCoord(self.pts_lazy)
    lazy_pts = coord.lazy_points()
    # lazy_points() should hand back an equivalent lazy array.
    self.assertEqualLazyArraysAndDtypes(lazy_pts, self.pts_lazy)
def test_lazy_core(self):
    coord = DimCoord(self.pts_real, bounds=self.bds_lazy)
    lazy_bds = coord.lazy_bounds()
    # lazy_bounds() should hand back an equivalent lazy array.
    self.assertEqualLazyArraysAndDtypes(lazy_bds, self.bds_lazy)
def experiment(cube, field):
    """Add an 'experiment number' to the cube, if present in the field."""
    # int_mdi is the field's missing-data indicator; skip when unset.
    if field.experiment_num == field.int_mdi:
        return
    coord = DimCoord(field.experiment_num, long_name="experiment_number")
    cube.add_aux_coord(coord)
def ensemble_member(cube, field):
    """Add an 'ensemble member' coord to the cube, if present in the field.

    Args:
        cube: The cube to attach the coordinate to.
        field: Source field; ``field.ensemble_member`` is compared against
            the field's missing-data indicator ``field.int_mdi``.
    """
    # Direct attribute access: getattr() with a constant literal name is
    # a redundant indirection.
    ensemble_member = field.ensemble_member
    if ensemble_member != field.int_mdi:
        cube.add_aux_coord(DimCoord(ensemble_member, "realization"))
def _make_cube(data, aux_coord=True, dim_coord=True, dtype=None):
    """
    Create a 3d synthetic test cube.

    Args:
        data: A 3d ndarray, or a shape tuple from which an uninitialised
            array of `dtype` is created.
        aux_coord (bool): If True, attach a 2d auxiliary pressure coord
            spanning dims (0, 1).
        dim_coord (bool): If True, attach a synthetic vertical dim coord
            on dim 0.
        dtype: Data/coordinate dtype; defaults to int8.

    Returns:
        An `iris.cube.Cube` with z/y/x dim coords (as requested).
    """
    if dtype is None:
        dtype = np.dtype('int8')
    if not isinstance(data, np.ndarray):
        data = np.empty(data, dtype=dtype)
    z, y, x = data.shape

    # Create the cube.
    cm = CellMethod(method='mean', coords='time',
                    intervals='20 minutes', comments=None)
    kwargs = dict(standard_name='air_temperature',
                  long_name='Air Temperature', var_name='ta', units='K',
                  attributes=dict(cube='attribute'), cell_methods=(cm, ))
    cube = iris.cube.Cube(data, **kwargs)

    # Create a synthetic test vertical coordinate.
    if dim_coord:
        cube.add_dim_coord(_make_vcoord(z, dtype=dtype), 0)

    # Create a synthetic test latitude coordinate.
    data = np.arange(y, dtype=dtype) + 1
    cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
    kwargs = dict(standard_name='latitude', long_name='Latitude',
                  var_name='lat', units='degrees_north',
                  attributes=dict(latitude='attribute'), coord_system=cs)
    ycoord = DimCoord(data, **kwargs)
    # guess_bounds() requires at least 2 points.
    if data.size > 1:
        ycoord.guess_bounds()
    cube.add_dim_coord(ycoord, 1)

    # Create a synthetic test longitude coordinate.
    data = np.arange(x, dtype=dtype) + 1
    kwargs = dict(standard_name='longitude', long_name='Longitude',
                  var_name='lon', units='degrees_east',
                  attributes=dict(longitude='attribute'), coord_system=cs)
    xcoord = DimCoord(data, **kwargs)
    if data.size > 1:
        xcoord.guess_bounds()
    cube.add_dim_coord(xcoord, 2)

    # Create a synthetic test 2d auxiliary coordinate
    # that spans the vertical dimension.
    if aux_coord:
        data = np.arange(np.prod((z, y)), dtype=dtype).reshape(z, y)
        kwargs = dict(standard_name=None, long_name='Pressure Slice',
                      var_name='aplev', units='hPa',
                      attributes=dict(positive='down'), coord_system=None)
        zycoord = AuxCoord(data, **kwargs)
        cube.add_aux_coord(zycoord, (0, 1))

    return cube
def test_pseudo_level(self):
    expected = 123
    coord = DimCoord(expected, long_name='pseudo_level', units='1')
    self.cube.add_aux_coord(coord)
    # lbuser[4] is the PP lbuser5 slot.
    actual = _pp_save_ppfield_values(self.cube).lbuser[4]
    self.assertEqual(expected, actual)
def create_data_object(self, filenames, variable, index_offset=1):
    """
    Build a GriddedData object for `variable` from a set of CALIOP files.

    Args:
        filenames: Sequence of HDF file paths to read.
        variable: Name of the SD variable to extract.
        index_offset: Column picked from the 2d Latitude/Longitude/
            Profile_Time arrays (presumably the profile-centre column —
            TODO confirm against callers).

    Returns:
        A GriddedData wrapping a (profile, altitude) cube of `variable`.
    """
    from cis.data_io.hdf_vd import get_data
    from cis.data_io.hdf_vd import VDS
    from pyhdf.error import HDF4Error
    from cis.data_io import hdf_sd
    from iris.coords import DimCoord, AuxCoord
    from iris.cube import Cube
    from cis.data_io.gridded_data import GriddedData
    from cis.time_util import cis_standard_time_unit

    logging.debug("Creating data object for variable " + variable)

    variables = ['Latitude', 'Longitude', "Profile_Time", "Pressure"]
    logging.info("Listing coordinates: " + str(variables))
    variables.append(variable)

    # reading data from files
    sdata = {}
    for filename in filenames:
        try:
            sds_dict = hdf_sd.read(filename, variables)
        except HDF4Error as e:
            raise IOError(str(e))
        for var in list(sds_dict.keys()):
            utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

    alt_name = "altitude"
    logging.info("Additional coordinates: '" + alt_name + "'")

    # work out size of data arrays
    # the coordinate variables will be reshaped to match that.
    # NOTE: This assumes that all Caliop_L1 files have the same altitudes.
    # If this is not the case, then the following line will need to be changed
    # to concatenate the data from all the files and not just arbitrarily pick
    # the altitudes from the first file.
    alt_data = get_data(VDS(filenames[0], "Lidar_Data_Altitudes"), True)
    alt_coord = DimCoord(alt_data, standard_name='altitude', units='km')
    alt_coord.convert_units('m')

    lat_data = hdf.read_data(sdata['Latitude'],
                             self._get_calipso_data)[:, index_offset]
    lat_coord = AuxCoord(lat_data, standard_name='latitude')

    pres_data = hdf.read_data(sdata['Pressure'], self._get_calipso_data)
    pres_coord = AuxCoord(pres_data, standard_name='air_pressure',
                          units='hPa')

    # longitude
    lon = sdata['Longitude']
    lon_data = hdf.read_data(lon, self._get_calipso_data)[:, index_offset]
    lon_coord = AuxCoord(lon_data, standard_name='longitude')

    # profile time, x
    time = sdata['Profile_Time']
    time_data = hdf.read_data(time, self._get_calipso_data)[:, index_offset]
    time_coord = DimCoord(time_data, long_name='Profile_Time',
                          standard_name='time',
                          units="seconds since 1993-01-01 00:00:00")
    time_coord.convert_units(cis_standard_time_unit)

    # retrieve data + its metadata
    var = sdata[variable]
    metadata = hdf.read_metadata(var, "SD")

    if variable in MIXED_RESOLUTION_VARIABLES:
        logging.warning(
            "Using Level 2 resolution profile for mixed resolution variable {}. See CALIPSO "
            "documentation for more details".format(variable))
        data = hdf.read_data(var, self._get_mixed_resolution_calipso_data)
    else:
        data = hdf.read_data(var, self._get_calipso_data)

    cube = Cube(data,
                long_name=metadata.long_name,
                units=self.clean_units(metadata.units),
                dim_coords_and_dims=[(alt_coord, 1), (time_coord, 0)],
                aux_coords_and_dims=[(lat_coord, (0, )),
                                     (lon_coord, (0, )),
                                     (pres_coord, (0, 1))])
    gd = GriddedData.make_from_cube(cube)
    return gd
def create_data_object(self, filenames, variable, index_offset=1):
    """
    Build a GriddedData object for `variable` from CALIOP gridded files.

    One cube is created per file, each carrying a scalar nominal-month
    time coord which is promoted to a length-one dimension; the cubes
    are then concatenated along time.

    Args:
        filenames: Sequence of HDF file paths; all are assumed to share
            the same lat/lon/altitude midpoints (taken from the first
            file — see NOTE below).
        variable: Name of the SD variable to extract.
        index_offset: Unused here (kept for interface compatibility —
            TODO confirm).

    Returns:
        A GriddedData spanning (time, lat, lon[, altitude]).
    """
    from cis.data_io.hdf_vd import get_data
    from cis.data_io.hdf_vd import VDS
    from pyhdf.error import HDF4Error
    from cis.data_io import hdf_sd
    from iris.coords import DimCoord, AuxCoord
    from iris.cube import Cube, CubeList
    from cis.data_io.gridded_data import GriddedData
    from cis.time_util import cis_standard_time_unit
    from datetime import datetime
    from iris.util import new_axis
    import numpy as np

    logging.debug("Creating data object for variable " + variable)

    variables = ["Pressure_Mean"]
    logging.info("Listing coordinates: " + str(variables))
    variables.append(variable)

    # reading data from files
    sdata = {}
    for filename in filenames:
        try:
            sds_dict = hdf_sd.read(filename, variables)
        except HDF4Error as e:
            raise IOError(str(e))
        for var in list(sds_dict.keys()):
            utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

    # work out size of data arrays
    # the coordinate variables will be reshaped to match that.
    # NOTE: This assumes that all Caliop_L1 files have the same altitudes.
    # If this is not the case, then the following line will need to be changed
    # to concatenate the data from all the files and not just arbitrarily pick
    # the altitudes from the first file.
    alt_data = self._get_calipso_data(
        hdf_sd.HDF_SDS(filenames[0], 'Altitude_Midpoint'))[0, :]
    alt_coord = DimCoord(alt_data, standard_name='altitude', units='km')
    alt_coord.convert_units('m')

    lat_data = self._get_calipso_data(
        hdf_sd.HDF_SDS(filenames[0], 'Latitude_Midpoint'))[0, :]
    lat_coord = DimCoord(lat_data, standard_name='latitude',
                         units='degrees_north')

    lon_data = self._get_calipso_data(
        hdf_sd.HDF_SDS(filenames[0], 'Longitude_Midpoint'))[0, :]
    lon_coord = DimCoord(lon_data, standard_name='longitude',
                         units='degrees_east')

    cubes = CubeList()
    for f in filenames:
        # Nominal month is stored as "YYYYMM"; use mid-month (day 15)
        # as the time point.
        t = get_data(VDS(f, "Nominal_Year_Month"), True)[0]
        time_data = cis_standard_time_unit.date2num(
            datetime(int(t[0:4]), int(t[4:6]), 15))
        time_coord = AuxCoord(time_data, long_name='Profile_Time',
                              standard_name='time',
                              units=cis_standard_time_unit)

        # retrieve data + its metadata
        var = sdata[variable]
        metadata = hdf.read_metadata(var, "SD")
        data = self._get_calipso_data(hdf_sd.HDF_SDS(f, variable))

        pres_data = self._get_calipso_data(
            hdf_sd.HDF_SDS(f, 'Pressure_Mean'))
        pres_coord = AuxCoord(pres_data, standard_name='air_pressure',
                              units='hPa')

        if data.ndim == 2:
            # pres_coord = new_axis()
            cube = Cube(data,
                        long_name=metadata.long_name or variable,
                        units=self.clean_units(metadata.units),
                        dim_coords_and_dims=[(lat_coord, 0),
                                             (lon_coord, 1)],
                        aux_coords_and_dims=[(time_coord, ())])
            # Promote the time scalar coord to a length one dimension
            new_cube = new_axis(cube, 'time')
            cubes.append(new_cube)
        elif data.ndim == 3:
            # pres_coord = new_axis()
            cube = Cube(data,
                        long_name=metadata.long_name or variable,
                        units=self.clean_units(metadata.units),
                        dim_coords_and_dims=[(lat_coord, 0),
                                             (lon_coord, 1),
                                             (alt_coord, 2)],
                        aux_coords_and_dims=[(time_coord, ())])
            # Promote the time scalar coord to a length one dimension
            new_cube = new_axis(cube, 'time')
            # Then add the (extended) pressure coord so that it is explicitly a function of time
            new_cube.add_aux_coord(pres_coord[np.newaxis, ...],
                                   (0, 1, 2, 3))
            cubes.append(new_cube)
        else:
            raise ValueError(
                "Unexpected number of dimensions for CALIOP data: {}".
                format(data.ndim))

    # Concatenate the cubes from each file into a single GriddedData object
    gd = GriddedData.make_from_cube(cubes.concatenate_cube())
    return gd
def sample_mesh_cube(nomesh_faces=None, n_z=2, with_parts=False,
                     **meshcoord_kwargs):
    """
    Create a 2d test cube with 1 'normal' and 1 unstructured dimension
    (with a Mesh).
    Result contains : dimcoords for both dims; an auxcoord on the
    unstructured dim; 2 mesh-coords.

    By default, the mesh is provided by :func:`sample_mesh`, so coordinates
    and connectivity are not realistic.

    Kwargs:
    * nomesh_faces (int or None):
        If set, don't add MeshCoords, so dim 1 is just a plain anonymous
        dim.  Set its length to the given value.
    * n_z (int):
        Length of the 'normal' dim.  If 0, it is *omitted*.
    * with_parts (bool):
        If set, return all the constituent component coords
    * meshcoord_kwargs (dict):
        Extra controls passed to :func:`sample_meshcoord` for MeshCoord
        creation, to allow user-specified location/mesh.  The 'axis' key
        is not available, as we always add both an 'x' and 'y' MeshCOord.

    Returns:
    * cube : if with_parts not set
    * (cube, parts) : if with_parts is set
        'parts' is (mesh, dim0-dimcoord, dim1-dimcoord, dim1-auxcoord,
        x-meshcoord [or None], y-meshcoord [or None]).
    """
    nomesh = nomesh_faces is not None
    if nomesh:
        # No mesh: dim 1 length is given directly.
        mesh = None
        n_faces = nomesh_faces
    else:
        # Use the caller's mesh if supplied, else a default sample mesh.
        mesh = meshcoord_kwargs.pop("mesh", None)
        if mesh is None:
            mesh = sample_mesh()
        meshx, meshy = (sample_meshcoord(axis=axis, mesh=mesh,
                                         **meshcoord_kwargs)
                        for axis in ("x", "y"))
        n_faces = meshx.shape[0]

    mesh_dimco = DimCoord(np.arange(n_faces), long_name="i_mesh_face",
                          units="1")
    auxco_x = AuxCoord(np.zeros(n_faces), long_name="mesh_face_aux",
                       units="1")
    zco = DimCoord(np.arange(n_z), long_name="level", units=1)
    cube = Cube(np.zeros((n_z, n_faces)), long_name="mesh_phenom")
    cube.add_dim_coord(zco, 0)
    if nomesh:
        mesh_coords = []
    else:
        mesh_coords = [meshx, meshy]
    # Dim 1 always gets a dimcoord ("dimcoords for both dims", above).
    cube.add_dim_coord(mesh_dimco, 1)
    for co in mesh_coords + [auxco_x]:
        cube.add_aux_coord(co, 1)

    if not with_parts:
        result = cube
    else:
        if nomesh:
            meshx, meshy = None, None
        parts = (mesh, zco, mesh_dimco, auxco_x, meshx, meshy)
        result = (cube, parts)

    return result
def test_lazy_points(self):
    # DimCoord realises lazy input, so core_points() is a real array.
    coord = DimCoord(self.pts_lazy)
    pts = coord.core_points()
    self.assertEqualRealArraysAndDtypes(pts, self.pts_real)
def set_up_variable_cube( data, name="air_temperature", units="K", spatial_grid="latlon", time=datetime(2017, 11, 10, 4, 0), time_bounds=None, frt=datetime(2017, 11, 10, 0, 0), realizations=None, include_scalar_coords=None, attributes=None, standard_grid_metadata=None, ): """ Set up a cube containing a single variable field with: - x/y spatial dimensions (equal area or lat / lon) - optional leading "realization" dimension - "time", "forecast_reference_time" and "forecast_period" scalar coords - option to specify additional scalar coordinates - configurable attributes Args: data (numpy.ndarray): 2D (y-x ordered) or 3D (realization-y-x ordered) array of data to put into the cube. name (str): Variable name (standard / long) units (str): Variable units spatial_grid (str): What type of x/y coordinate values to use. Permitted values are "latlon" or "equalarea". time (datetime.datetime): Single cube validity time time_bounds (tuple or list of datetime.datetime instances): Lower and upper bound on time point, if required frt (datetime.datetime): Single cube forecast reference time realizations (list or numpy.ndarray): List of forecast realizations. If not present, taken from the leading dimension of the input data array (if 3D). include_scalar_coords (list): List of iris.coords.DimCoord or AuxCoord instances of length 1. attributes (dict): Optional cube attributes. standard_grid_metadata (str): Recognised mosg__model_configuration for which to set up Met Office standard grid attributes. Should be 'uk_det', 'uk_ens', 'gl_det' or 'gl_ens'. 
""" # construct spatial dimension coordimates ypoints = data.shape[-2] xpoints = data.shape[-1] y_coord, x_coord = construct_xy_coords(ypoints, xpoints, spatial_grid) # construct realization dimension for 3D data, and dim_coords list ndims = len(data.shape) if ndims == 3: if realizations is not None: if len(realizations) != data.shape[0]: raise ValueError( "Cannot generate {} realizations from data of shape " "{}".format(len(realizations), data.shape)) realizations = np.array(realizations) if issubclass(realizations.dtype.type, np.integer): # expect integer realizations realizations = realizations.astype(np.int32) else: # option needed for percentile & probability cube setup realizations = realizations.astype(np.float32) else: realizations = np.arange(data.shape[0]).astype(np.int32) realization_coord = DimCoord(realizations, "realization", units="1") dim_coords = [(realization_coord, 0), (y_coord, 1), (x_coord, 2)] elif ndims == 2: dim_coords = [(y_coord, 0), (x_coord, 1)] else: raise ValueError( "Expected 2 or 3 dimensions on input data: got {}".format(ndims)) # construct list of aux_coords_and_dims scalar_coords = construct_scalar_time_coords(time, time_bounds, frt) if include_scalar_coords is not None: for coord in include_scalar_coords: scalar_coords.append((coord, None)) # set up attributes cube_attrs = {} if standard_grid_metadata is not None: cube_attrs.update(MOSG_GRID_DEFINITION[standard_grid_metadata]) if attributes is not None: cube_attrs.update(attributes) # create data cube cube = iris.cube.Cube( data, units=units, attributes=cube_attrs, dim_coords_and_dims=dim_coords, aux_coords_and_dims=scalar_coords, ) cube.rename(name) # don't allow unit tests to set up invalid cubes check_mandatory_standards(cube) return cube
def test_real_bounds(self):
    # The coord must take a copy of the bounds it is given.
    coord = DimCoord(self.pts_real, bounds=self.bds_real)
    self.assertArraysDoNotShareData(
        coord.core_bounds(), self.bds_real,
        'core_bounds() are the same data as the internal array.')
def add_coordinate( incube, coord_points, coord_name, coord_units=None, dtype=np.float32, order=None, is_datetime=False, attributes=None, ): """ Function to duplicate a sample cube with an additional coordinate to create a cubelist. The cubelist is merged to create a single cube, which can be reordered to place the new coordinate in the required position. Args: incube (iris.cube.Cube): Cube to be duplicated. coord_points (list or numpy.ndarray): Values for the coordinate. coord_name (str): Long name of the coordinate to be added. coord_units (str): Coordinate unit required. dtype (type): Datatype for coordinate points. order (list): Optional list of integers to reorder the dimensions on the new merged cube. For example, if the new coordinate is required to be in position 1 on a 4D cube, use order=[1, 0, 2, 3] to swap the new coordinate position with that of the original leading coordinate. is_datetime (bool): If "true", the leading coordinate points have been given as a list of datetime objects and need converting. In this case the "coord_units" argument is overridden and the time points provided in seconds. The "dtype" argument is overridden and set to int64. attributes (dict): Optional coordinate attributes. Returns: iris.cube.Cube: Cube containing an additional dimension coordinate. 
""" # if the coordinate already exists as a scalar coordinate, remove it cube = incube.copy() try: cube.remove_coord(coord_name) except CoordinateNotFoundError: pass # if new coordinate points are provided as datetimes, convert to seconds if is_datetime: coord_units = TIME_COORDS["time"].units dtype = TIME_COORDS["time"].dtype new_coord_points = [_create_time_point(val) for val in coord_points] coord_points = new_coord_points cubes = iris.cube.CubeList([]) for val in coord_points: temp_cube = cube.copy() temp_cube.add_aux_coord( DimCoord( np.array([val], dtype=dtype), long_name=coord_name, units=coord_units, attributes=attributes, )) # recalculate forecast period if time or frt have been updated if ("time" in coord_name and coord_units is not None and Unit(coord_units).is_time_reference()): forecast_period = forecast_period_coord( temp_cube, force_lead_time_calculation=True) try: temp_cube.replace_coord(forecast_period) except CoordinateNotFoundError: temp_cube.add_aux_coord(forecast_period) cubes.append(temp_cube) new_cube = cubes.merge_cube() if order is not None: new_cube.transpose(order) return new_cube
def test_no_bounds(self):
    # Without bounds, lazy_bounds() yields None.
    coord = DimCoord(self.pts_real)
    self.assertIsNone(coord.lazy_bounds())
def build_spotdata_cube(
    data,
    name,
    units,
    altitude,
    latitude,
    longitude,
    wmo_id,
    scalar_coords=None,
    neighbour_methods=None,
    grid_attributes=None,
    additional_dims=None,
):
    """
    Function to build a spotdata cube with expected dimension and auxiliary
    coordinate structure.

    It can be used to create spot data cubes. In this case the data is the
    spot data values at each site, and the coordinates that describe each
    site.

    It can also be used to create cubes which describe the grid points that
    are used to extract each site from a gridded field, for different
    selection method. The selection methods are specified by the
    neighbour_methods coordinate. The grid_attribute coordinate encapsulates
    information required to extract data, for example the x/y indices that
    identify the grid point neighbour.

    .. See the documentation for examples of these cubes.
    .. include:: extended_documentation/spotdata/build_spotdata_cube/
       build_spotdata_cube_examples.rst

    Args:
        data (float or numpy.ndarray):
            Float spot data or array of data points from several sites.
            The spot index should be the last dimension if the array is
            multi-dimensional (see optional additional dimensions below).
        name (str):
            Cube name (eg 'air_temperature')
        units (str):
            Cube units (eg 'K')
        altitude (float or numpy.ndarray):
            Float or 1d array of site altitudes in metres
        latitude (float or numpy.ndarray):
            Float or 1d array of site latitudes in degrees
        longitude (float or numpy.ndarray):
            Float or 1d array of site longitudes in degrees
        wmo_id (str or list of str):
            String or list of site 5-digit WMO identifiers
        scalar_coords (list of iris.coords.AuxCoord):
            Optional list of iris.coords.AuxCoord instances
        neighbour_methods (list of str):
            Optional list of neighbour method names, e.g. 'nearest'
        grid_attributes (list of str):
            Optional list of grid attribute names, e.g. x-index, y-index
        additional_dims (list of iris.coords.DimCoord):
            Optional list of additional dimensions to preceed the spot
            data dimension.

    Returns:
        iris.cube.Cube:
            A cube containing the extracted spot data with spot data being
            the final dimension.
    """
    # construct auxiliary coordinates
    alt_coord = AuxCoord(altitude, "altitude", units="m")
    lat_coord = AuxCoord(latitude, "latitude", units="degrees")
    lon_coord = AuxCoord(longitude, "longitude", units="degrees")
    id_coord = AuxCoord(wmo_id, long_name="wmo_id", units="no_unit")

    aux_coords_and_dims = []

    # append scalar coordinates
    if scalar_coords is not None:
        for coord in scalar_coords:
            aux_coords_and_dims.append((coord, None))

    # construct dimension coordinates
    if np.isscalar(data):
        data = np.array([data])
    spot_index = DimCoord(
        np.arange(data.shape[-1], dtype=np.int32),
        long_name="spot_index",
        units="1"
    )

    dim_coords_and_dims = []
    # current_dim tracks the next free leading dimension as optional
    # dimensions are added ahead of the spot index.
    current_dim = 0

    if neighbour_methods is not None:
        neighbour_methods_coord = DimCoord(
            np.arange(len(neighbour_methods), dtype=np.int32),
            long_name="neighbour_selection_method",
            units="1",
        )
        neighbour_methods_key = AuxCoord(
            neighbour_methods,
            long_name="neighbour_selection_method_name",
            units="no_unit",
        )
        dim_coords_and_dims.append((neighbour_methods_coord, current_dim))
        aux_coords_and_dims.append((neighbour_methods_key, current_dim))
        current_dim += 1

    if grid_attributes is not None:
        grid_attributes_coord = DimCoord(
            np.arange(len(grid_attributes), dtype=np.int32),
            long_name="grid_attributes",
            units="1",
        )
        grid_attributes_key = AuxCoord(
            grid_attributes, long_name="grid_attributes_key",
            units="no_unit"
        )
        dim_coords_and_dims.append((grid_attributes_coord, current_dim))
        aux_coords_and_dims.append((grid_attributes_key, current_dim))
        current_dim += 1

    if additional_dims is not None:
        for coord in additional_dims:
            dim_coords_and_dims.append((coord, current_dim))
            current_dim += 1

    # The spot index is always the final dimension; the site-describing
    # aux coords are attached to it.
    dim_coords_and_dims.append((spot_index, current_dim))
    for coord in [alt_coord, lat_coord, lon_coord, id_coord]:
        aux_coords_and_dims.append((coord, current_dim))

    # create output cube
    spot_cube = iris.cube.Cube(
        data,
        long_name=name,
        units=units,
        dim_coords_and_dims=dim_coords_and_dims,
        aux_coords_and_dims=aux_coords_and_dims,
    )
    # rename to force a standard name to be set if name is valid
    spot_cube.rename(name)

    return spot_cube
def test_lazy_core(self):
    # Lazy points are realised on construction, so nothing stays lazy.
    coord = DimCoord(self.pts_lazy)
    self.assertFalse(coord.has_lazy_points())
def convert(year, month, ndays, remove=False):
    """
    Now need to:
     - convert to q, RH , e, tw, DPD
     - aggregate to daily averages
     - regrid to 1by1 gridboxes

    Args:
        year (int): Year of the files to process.
        month (int): Month of the files to process.
        ndays (int): Number of days in the month (hourly time axis is
            ndays * 24 long).
        remove (bool): If True, delete the hourly input files afterwards.
    """
    MDI = -999.  # missing-data indicator (unused in the LSM section)

    # Set up null_cube with desired gridding format to use as a template
    # Does this have to have the same time dimensions?
    # ndays = np.int(p_cube.data[:,0,0] / 24)
    time = DimCoord(np.arange(ndays * 24),
                    standard_name='time',
                    units='hours')
    latitude = DimCoord(
        np.linspace(89.5, -89.5, 180),  # DIFF FROM OTHER ERA5 DOWNLOAD
        # latitude = DimCoord(np.linspace(90, -90, 181),
        standard_name='latitude',
        long_name='gridbox centre latitude',
        units='degrees_north')
    longitude = DimCoord(
        np.linspace(-179.5, 179.5, 360),  # DIFF FROM OTHER ERA5 DOWNLOAD
        # longitude = DimCoord(np.linspace(0, 359, 360),
        standard_name='longitude',
        long_name='gridbox centre longitude',
        units='degrees_east')
    null_cube = Cube(np.zeros((ndays * 24, 180, 360), np.float32),
                     dim_coords_and_dims=[(time, 0),
                                          (latitude, 1),
                                          (longitude, 2)])
    print('Check null_cube for new grid')
    # pdb.set_trace()

    # START OF LSM************************************************
    # read in land_sea_mask
    variable = "land_sea_mask"
    lsm_cube = iris.load(
        os.path.join(DataLoc,
                     "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    # pdb.set_trace()
    # convert from list to cube
    lsm_cube = lsm_cube[0]

    # Regrid to 1by1 degree - cache the source, template, gridding type
    # for later use - faster
    regridder = iris.analysis.Linear().regridder(lsm_cube, null_cube)
    lsm_cube_1by1 = regridder(lsm_cube)
    print('Check lsm_cube_1by1 for new grid')
    # pdb.set_trace()

    # remove old cube
    lsm_cube = 0

    # The mask is time-invariant, so keep only the first time slice.
    lsm_cube_1by1 = lsm_cube_1by1[0, :, :]
    # lsm_cube_1by1_field = lsm_cube_1by1.extract(iris.Constraint(time=0))
    lsm_cube_1by1.units = "1"
    print(lsm_cube_1by1)
    print('Check lsm_cube_1by1 for land_sea_mask')
    # pdb.set_trace()

    # output
    iris.save(lsm_cube_1by1,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_hourly_{}_ERA5.nc".format(year, month, variable)),
              zlib=True)
    print('Check lsm_cube_1by1 output')
    # pdb.set_trace()
    # END OF LSM************************************************************

    # remove input files
    if remove:
        for variable in [
                "2m_temperature", "2m_dewpoint_temperature",
                "surface_pressure"
        ]:
            # for variable in ["2m_temperature", "2m_dewpoint_temperature", "surface_pressure", "land_sea_mask"]:
            os.remove(
                os.path.join(
                    DataLoc,
                    "{}{:02d}_hourly_{}.nc".format(year, month, variable)))

    return
    # combine
def test_f16(self):
    # float16 bounds should be preserved in bounds_dtype.
    dtype_under_test = np.float16
    bounds = np.array([[0, 4]], dtype=dtype_under_test)
    coord = DimCoord([1], bounds=bounds)
    self.assertEqual(coord.bounds_dtype, dtype_under_test)
def test_formula_terms_no_p0_term(self):
    coord_a = DimCoord(np.arange(5), units='Pa')
    self.provides['coordinates'].append((coord_a, 'a'))
    # Formula terms with no 'p0' entry at all.
    self.requires['formula_terms'] = {'a': 'a', 'b': 'b', 'ps': 'ps'}
    _load_aux_factory(self.engine, self.cube)
    self._check_no_delta()
def test_fail_nonmonotonic(self):
    # Non-monotonic points are rejected outright.
    emsg = 'must be strictly monotonic'
    with self.assertRaisesRegex(ValueError, emsg):
        DimCoord([1, 2, 0, 3])
def test_formula_terms_no_a_term(self):
    coord_p0 = DimCoord(10, units='1')
    self.provides['coordinates'].append((coord_p0, 'p0'))
    # 'a' points at the p0 coordinate instead of a real 'a' term.
    self.requires['formula_terms'] = {'a': 'p0', 'b': 'b', 'ps': 'ps'}
    _load_aux_factory(self.engine, self.cube)
    self._check_no_delta()
def test_valid(self):
    result = _convert_scalar_pseudo_level_coords(lbuser5=21)
    expected = [(DimCoord([21], long_name='pseudo_level'), None)]
    self.assertEqual(result, expected)
def test_formula_terms_p0_non_scalar(self):
    # A multi-valued p0 term is invalid and must raise.
    coord_p0 = DimCoord(np.arange(5))
    self.provides['coordinates'].append((coord_p0, 'p0'))
    self.requires['formula_terms'] = {'p0': 'p0'}
    with self.assertRaises(ValueError):
        _load_aux_factory(self.engine, self.cube)
'lat lon shape of emission file') return time_series, lat_sat, lon_sat, emission_mon_mean, sat_mon_mean time_series, lat, lon, emission_mon_mean, sat_mon_mean = monthly_mean_cal() print(sat_mon_mean.shape, '!sat_mon_mean.shape') print(emission_mon_mean.shape, '!emission_mon_mean.shape') #regridding using iris lat_min, lon_min = np.nanmin(lat), np.nanmin(lon) lat_max, lon_max = np.nanmax(lat), np.nanmax(lon) lat01 = np.arange(lat_min, lat_max, 0.1) lon01 = np.arange(lon_min, lon_max, 0.1) latitude = DimCoord(lat, standard_name='latitude', units='degrees') longitude = DimCoord(lon, standard_name='longitude', units='degrees') time = DimCoord(np.linspace(1, 12, 12), standard_name='time', units='month') #print (time) cube1 = Cube(sat_mon_mean, dim_coords_and_dims=[(latitude, 1), (longitude, 2), (time, 0)]) cube2 = Cube(emission_mon_mean, dim_coords_and_dims=[(latitude, 1), (longitude, 2), (time, 0)]) regridded_data_sat = cube1.interpolate([('latitude', lat01), ('longitude', lon01)], iris.analysis.Linear()) print(regridded_data_sat.shape, 'regrid_sat')
def test_1d_discontigous_masked(self):
    # Test a 1D coordinate which is discontiguous but masked at
    # discontiguities.
    coord = DimCoord([1, 3, 5], bounds=[[0, 2], [2, 4], [5, 6]])
    values = ma.array(np.array([278, 300, 282]), mask=[0, 1, 0])
    _check_bounds_contiguity_and_mask(coord, values, atol=1e-3)