def _create_cube(self, filenames, variable):
    from cis.data_io.hdf import _read_hdf4
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord, AuxCoord
    from cis.time_util import calculate_mid_time, cis_standard_time_unit
    from cis.data_io.hdf_sd import get_metadata
    from cf_units import Unit

    variables = ['XDim', 'YDim', variable]
    logging.info("Listing coordinates: " + str(variables))

    cube_list = CubeList()
    # Read each file individually and let Iris do the merging at the end.
    for f in filenames:
        sdata, vdata = _read_hdf4(f, variables)

        lat_coord = DimCoord(_get_MODIS_SDS_data(sdata['YDim']), standard_name='latitude', units='degrees')
        lon_coord = DimCoord(_get_MODIS_SDS_data(sdata['XDim']), standard_name='longitude', units='degrees')

        # Create a scalar time coordinate at the midpoint between the file's start and end dates.
        start_datetime = self._get_start_date(f)
        end_datetime = self._get_end_date(f)
        mid_datetime = calculate_mid_time(start_datetime, end_datetime)
        logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
        time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                              bounds=[start_datetime, end_datetime])

        var = sdata[variable]
        metadata = get_metadata(var)

        try:
            units = Unit(metadata.units)
        except ValueError:
            logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
            units = None

        cube = Cube(_get_MODIS_SDS_data(var),
                    dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                    aux_coords_and_dims=[(time_coord, None)],
                    var_name=metadata._name, long_name=metadata.long_name, units=units)
        cube_list.append(cube)

    # Merge the cube list across the scalar time coordinates before returning a single cube.
    return cube_list.merge_cube()
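# Usage sketch (assumption, for illustration only): _create_cube is an internal
# helper, normally invoked from a data product's create_data_object(). The
# product class, file name and variable below are hypothetical examples, not
# taken from the source:
#
#     product = MODIS_L3()
#     cube = product._create_cube(['MOD08_E3.A2010009.005.hdf'],
#                                 'Cloud_Top_Temperature_Mean_Mean')
#
# The result is a single iris.cube.Cube in which each file's scalar time
# coordinate has been merged into one time dimension.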
def test_read_hdf4():
    filename = escape_colons(valid_hdf_sd_file)
    sds, vds = _read_hdf4(filename, ['Solution_Ocean', 'Path_Radiance_Land', 'Mean_Reflectance_Land'])

    # VD variables are listed in the VD part of the tuple, but not in the SD part.
    eq_(True, 'Solution_Ocean' in vds)
    eq_(False, 'Solution_Ocean' in sds)

    # SD variables are listed in the SD part of the tuple, but not in the VD part.
    eq_(True, 'Path_Radiance_Land' in sds)
    eq_(False, 'Path_Radiance_Land' in vds)
    eq_(True, 'Mean_Reflectance_Land' in sds)
    eq_(False, 'Mean_Reflectance_Land' in vds)
def _create_cube(self, filenames, variable):
    import numpy as np
    from cis.data_io.hdf import _read_hdf4
    from cis.data_io import hdf_vd
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord, AuxCoord
    from cis.time_util import calculate_mid_time, cis_standard_time_unit
    from cis.data_io.hdf_sd import get_metadata
    from cf_units import Unit

    variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
    logging.info("Listing coordinates: " + str(variables))

    cube_list = CubeList()
    # Read each file individually and let Iris do the merging at the end.
    for f in filenames:
        sdata, vdata = _read_hdf4(f, variables)

        # The global-grid dimension sizes are stored as VD data; use them to
        # build regularly spaced latitude and longitude points.
        lat_points = np.linspace(-90., 90., hdf_vd.get_data(vdata['YDim:GlobalGrid']))
        lon_points = np.linspace(-180., 180., hdf_vd.get_data(vdata['XDim:GlobalGrid']))

        lat_coord = DimCoord(lat_points, standard_name='latitude', units='degrees')
        lon_coord = DimCoord(lon_points, standard_name='longitude', units='degrees')

        # Create a scalar time coordinate at the midpoint between the file's start and end dates.
        start_datetime = self._get_start_date(f)
        end_datetime = self._get_end_date(f)
        mid_datetime = calculate_mid_time(start_datetime, end_datetime)
        logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
        time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                              bounds=[start_datetime, end_datetime])

        var = sdata[variable]
        metadata = get_metadata(var)

        try:
            units = Unit(metadata.units)
        except ValueError:
            logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
            units = None

        cube = Cube(_get_MODIS_SDS_data(var),
                    dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                    aux_coords_and_dims=[(time_coord, None)],
                    var_name=metadata._name, long_name=metadata.long_name, units=units)
        cube_list.append(cube)

    # Merge the cube list across the scalar time coordinates before returning a single cube.
    return cube_list.merge_cube()
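# Note on the grid construction above (assumption): hdf_vd.get_data(...) is
# taken to return the number of grid points along each dimension, so a global
# grid described as 180 x 360 points yields, e.g.:
#
#     >>> import numpy as np
#     >>> np.linspace(-90., 90., 180).shape
#     (180,)
#
# with the first and last points landing exactly on -90 and 90, since
# np.linspace includes both endpoints by default.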
def test_that_cannot_read_unknown_variables_and_valid_variables():
    filename = escape_colons(valid_hdf_sd_file)
    sds, vds = _read_hdf4(filename, ['someBizarreVariableNobodyKnowsAbout',
                                     'Solution_Ocean', 'Path_Radiance_Land', 'Mean_Reflectance_Land'])
def test_that_cannot_read_unknown_variables():
    filename = escape_colons(valid_hdf_sd_file)
    sds, vds = _read_hdf4(filename, ['athing', 'unechose', 'einding'])
def should_raise_io_error_with_non_hdf_file():
    # Reading a non-HDF4 file should fail with an IOError.
    _read_hdf4(valid_cloud_cci_filename, valid_cloud_cci_variable)
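# Note (assumption): the three "cannot read" / "should raise" tests above only
# pass if the expected exception is caught by the test runner. The suite uses
# nose-style eq_ assertions elsewhere, so these are presumably decorated with
# nose.tools.raises in the original source, along the lines of:
#
#     from nose.tools import raises
#
#     @raises(IOError)
#     def should_raise_io_error_with_non_hdf_file():
#         _read_hdf4(valid_cloud_cci_filename, valid_cloud_cci_variable)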