def test_cf_data_contiguous(self):
    """Check that 'contiguous' storage behaves the same as no chunking."""
    # A netCDF 'contiguous' layout should produce the default
    # shape-derived chunking on the lazy array.
    var = self._make("contiguous")
    lazy = _get_cf_var_data(var, self.filename)
    leading_sizes = [sizes[0] for sizes in lazy.chunks]
    self.assertArrayEqual(leading_sizes, self.expected_chunks)
def test_cf_data_chunks(self):
    """Check that explicit variable chunking is passed through the optimiser."""
    declared = [2500, 240, 200]
    var = self._make(declared)
    lazy = _get_cf_var_data(var, self.filename)
    leading_sizes = [sizes[0] for sizes in lazy.chunks]
    # The declared chunks should be adjusted by _optimum_chunksize.
    expected = _optimum_chunksize(declared, self.shape)
    self.assertArrayEqual(leading_sizes, expected)
def test_cf_data_contiguous(self):
    """Check that 'contiguous' storage behaves like an unchunked variable."""
    # 'contiguous' is treated as "no chunking declared", so the lazy
    # array should fall back to the expected default chunks.
    var = self._make('contiguous')
    lazy = _get_cf_var_data(var, self.filename)
    leading_sizes = [sizes[0] for sizes in lazy.chunks]
    self.assertArrayEqual(leading_sizes, self.expected_chunks)
def test_cf_data_no_chunks(self):
    """Check chunk derivation when the variable declares no chunking."""
    # With no chunks, sizes are computed from the array's shape by
    # `iris._lazy_data._optimum_chunksize()`.
    var = self._make(None)
    lazy = _get_cf_var_data(var, self.filename)
    leading_sizes = [sizes[0] for sizes in lazy.chunks]
    self.assertArrayEqual(leading_sizes, self.expected_chunks)
def test_cf_data_no_chunks(self):
    """Check chunk derivation when the variable declares no chunking."""
    # With no chunks, sizes are computed from the array's shape by
    # `iris._lazy_data._limited_shape()`.
    var = self._make(None)
    lazy = _get_cf_var_data(var, self.filename)
    leading_sizes = [sizes[0] for sizes in lazy.chunks]
    self.assertArrayEqual(leading_sizes, self.expected_chunks)
def build_cell_measures(engine, cf_cm_var):
    """Create a CellMeasure instance and add it to the cube."""
    data_var = engine.cf_var
    target_cube = engine.cube
    attributes = {}

    # Resolve the units (this also populates `attributes`).
    attr_units = get_attr_units(cf_cm_var, attributes)

    # Fetch the (lazy) payload array for the measure.
    payload = _get_cf_var_data(cf_cm_var, engine.filename)

    # Dimensions shared between the CF-netCDF data variable and the
    # cell-measure variable determine how the measure maps onto the cube.
    shared = [
        dim for dim in cf_cm_var.dimensions if dim in data_var.dimensions
    ]
    if shared:
        # Translate each shared dimension into its offset on the data variable.
        data_dims = [data_var.dimensions.index(dim) for dim in shared]
    else:
        data_dims = None

    # Work out the standard_name, long_name and var_name.
    standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes)

    # Build the CellMeasure from the gathered metadata and payload.
    cell_measure = iris.coords.CellMeasure(
        payload,
        standard_name=standard_name,
        long_name=long_name,
        var_name=var_name,
        units=attr_units,
        attributes=attributes,
        measure=cf_cm_var.cf_measure,
    )

    # Attach the measure to the cube.
    target_cube.add_cell_measure(cell_measure, data_dims)

    # Record it on the engine so it can be located again later.
    engine.cube_parts["cell_measures"].append(
        (cell_measure, cf_cm_var.cf_name)
    )
def build_ancil_var(engine, cf_av_var):
    """Create an AncillaryVariable instance and add it to the cube."""
    data_var = engine.cf_var
    target_cube = engine.cube
    attributes = {}

    # Resolve the units (this also populates `attributes`).
    attr_units = get_attr_units(cf_av_var, attributes)

    # Fetch the (lazy) payload array for the ancillary variable.
    payload = _get_cf_var_data(cf_av_var, engine.filename)

    # Dimensions shared between the CF-netCDF data variable and the
    # ancillary variable determine how it maps onto the cube.
    shared = [
        dim for dim in cf_av_var.dimensions if dim in data_var.dimensions
    ]
    if shared:
        # Translate each shared dimension into its offset on the data variable.
        data_dims = [data_var.dimensions.index(dim) for dim in shared]
    else:
        data_dims = None

    # Work out the standard_name, long_name and var_name.
    standard_name, long_name, var_name = get_names(cf_av_var, None, attributes)

    # Build the AncillaryVariable from the gathered metadata and payload.
    av = iris.coords.AncillaryVariable(
        payload,
        standard_name=standard_name,
        long_name=long_name,
        var_name=var_name,
        units=attr_units,
        attributes=attributes,
    )

    # Attach it to the cube.
    target_cube.add_ancillary_variable(av, data_dims)

    # Record it on the engine so it can be located again later.
    engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name))
def build_auxiliary_coordinate(engine, cf_coord_var, coord_name=None, coord_system=None):
    """Create an auxiliary coordinate (AuxCoord) and add it to the cube."""
    data_var = engine.cf_var
    target_cube = engine.cube
    attributes = {}

    # Resolve the units (this also populates `attributes`).
    attr_units = get_attr_units(cf_coord_var, attributes)

    # Coordinate points: label variables need their special accessor,
    # everything else loads via the generic (lazy) data path.
    if isinstance(cf_coord_var, cf.CFLabelVariable):
        points_data = cf_coord_var.cf_label_data(data_var)
    else:
        points_data = _get_cf_var_data(cf_coord_var, engine.filename)

    # Coordinate bounds, if a bounds variable exists.
    cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var)
    if cf_bounds_var is None:
        bounds_data = None
    else:
        bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename)
        # Handle transposed bounds where the vertex dimension is not
        # the last one. Test based on shape to support different
        # dimension names.
        if cf_bounds_var.shape[:-1] != cf_coord_var.shape:
            # Resolving the data to a numpy array (i.e. *not* masked) for
            # compatibility with array creators (i.e. dask)
            bounds_data = np.asarray(bounds_data)
            bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var)

    # Dimensions shared between the CF-netCDF data variable and the
    # coordinate variable determine how the coordinate maps onto the cube.
    shared = [
        dim for dim in cf_coord_var.dimensions if dim in data_var.dimensions
    ]
    if shared:
        # Translate each shared dimension into its offset on the data variable.
        data_dims = [data_var.dimensions.index(dim) for dim in shared]
    else:
        data_dims = None

    # Work out the standard_name, long_name and var_name.
    standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes)

    # Build the auxiliary coordinate from the gathered pieces.
    coord = iris.coords.AuxCoord(
        points_data,
        standard_name=standard_name,
        long_name=long_name,
        var_name=var_name,
        units=attr_units,
        bounds=bounds_data,
        attributes=attributes,
        coord_system=coord_system,
        climatological=climatological,
    )

    # Attach it to the cube.
    target_cube.add_aux_coord(coord, data_dims)

    # Record it on the engine so it can be located again later.
    engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name))
def test_cf_data_type(self):
    """Check that the loaded payload is a dask array."""
    var = self._make([1, 12, 100])
    lazy = _get_cf_var_data(var, self.filename)
    self.assertIsInstance(lazy, dask_array)
def test_cf_data_chunks(self):
    """Check that the variable's declared chunking is honoured as-is."""
    declared = [1, 12, 100]
    var = self._make(declared)
    lazy = _get_cf_var_data(var, self.filename)
    leading_sizes = [sizes[0] for sizes in lazy.chunks]
    self.assertArrayEqual(declared, leading_sizes)