def _create_coord_list(self, filenames):
    """Build the coordinate list (lat, lon, optional height, time) for CloudSat files.

    If the product has no 'Height' field the height coordinate is omitted and
    the remaining coordinates stay one-dimensional.
    """
    from cis.time_util import cis_standard_time_unit

    wanted = ['Latitude', 'Longitude', 'TAI_start', 'Profile_time', 'Height']

    try:
        logging.info("Listing coordinates: " + str(wanted))
        sdata, vdata = hdf.read(filenames, wanted)

        # altitude coordinate
        alt_sds = sdata['Height']
        alt_values = hdf.read_data(alt_sds, "SD")
        alt_coord = Coord(alt_values, hdf.read_metadata(alt_sds, "SD"), "Y")
    except InvalidVariableError:
        # This means we are reading a Cloudsat file without height, so remove height from the variables list
        wanted.remove('Height')
        logging.info("Listing coordinates: " + str(wanted))
        sdata, vdata = hdf.read(filenames, wanted)
        alt_values = None
        alt_coord = None

    def _maybe_expand(one_d):
        # Replicate a 1-d array across the height dimension when height exists
        if alt_values is None:
            return one_d
        return utils.expand_1d_to_2d_array(one_d, len(alt_values[0]), axis=1)

    # latitude
    lat_vd = vdata['Latitude']
    lat_values = _maybe_expand(hdf.read_data(lat_vd, "VD"))
    lat_meta = hdf.read_metadata(lat_vd, "VD")
    lat_meta.shape = lat_values.shape
    lat_coord = Coord(lat_values, lat_meta)

    # longitude
    lon_vd = vdata['Longitude']
    lon_values = _maybe_expand(hdf.read_data(lon_vd, "VD"))
    lon_meta = hdf.read_metadata(lon_vd, "VD")
    lon_meta.shape = lon_values.shape
    lon_coord = Coord(lon_values, lon_meta)

    # time coordinate
    t_values = _maybe_expand(self._generate_time_array(vdata))
    time_coord = Coord(t_values,
                       Metadata(name='Profile_time', standard_name='time',
                                shape=t_values.shape,
                                units=str(cis_standard_time_unit),
                                calendar=cis_standard_time_unit.calendar),
                       "X")

    # assemble the coordinate list
    coords = CoordList()
    coords.append(lat_coord)
    coords.append(lon_coord)
    if alt_coord is not None:
        coords.append(alt_coord)
    coords.append(time_coord)
    return coords
def create_data_object(self, filenames, variable):
    """Read *variable* from CloudSat files and wrap it as UngriddedData."""
    logging.debug("Creating data object for variable " + variable)

    # reading coordinates first, so we know whether a height dimension exists
    coords = self._create_coord_list(filenames)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)

    # values treated as missing in CloudSat products
    missing_values = [0, -9999, -4444, -3333]

    # retrieve data + its metadata
    if variable in vdata:
        raw = hdf.read_data(vdata[variable], "VD", missing_values)
        # VD data is 1-d; expand along height to match the coordinates when present
        try:
            n_heights = coords.get_coord('Height').shape[1]
        except CoordinateNotFoundError:
            var = raw
        else:
            var = utils.expand_1d_to_2d_array(raw, n_heights, axis=1)
        metadata = hdf.read_metadata(vdata[variable], "VD")
    elif variable in sdata:
        var = hdf.read_data(sdata[variable], "SD", missing_values)
        metadata = hdf.read_metadata(sdata[variable], "SD")
    else:
        raise ValueError("variable not found")

    return UngriddedData(var, metadata, coords)
def _create_coord_list(self, filenames, variable=None):
    """Build lat/lon/time coordinates for MODIS L2 files.

    :param filenames: files to read coordinates from
    :param variable: optional data variable name; used only to decide whether
        the 5km geolocation must be interpolated up to the 1km data grid
    :return: CoordList of latitude (Y), longitude (X) and time (T)
    """
    import datetime as dt

    variables = ["Latitude", "Longitude", "Scan_Start_Time"]
    logging.info("Listing coordinates: " + str(variables))
    sdata, vdata = hdf.read(filenames, variables)

    apply_interpolation = False
    if variable is not None:
        scale = self.__get_data_scale(filenames[0], variable)
        # Fix: was 'scale is "1km"', which tests object identity, not value,
        # and is not guaranteed True even when the strings are equal.
        apply_interpolation = scale == "1km"

    lat = sdata["Latitude"]
    sd_lat = hdf.read_data(lat, "SD")
    lat_data = self.__field_interpolate(sd_lat) if apply_interpolation else sd_lat
    lat_metadata = hdf.read_metadata(lat, "SD")
    lat_coord = Coord(lat_data, lat_metadata, "Y")

    lon = sdata["Longitude"]
    lon_data = (
        self.__field_interpolate(hdf.read_data(lon, "SD"))
        if apply_interpolation
        else hdf.read_data(lon, "SD")
    )
    lon_metadata = hdf.read_metadata(lon, "SD")
    lon_coord = Coord(lon_data, lon_metadata, "X")

    time = sdata["Scan_Start_Time"]
    time_metadata = hdf.read_metadata(time, "SD")
    # Ensure the standard name is set
    time_metadata.standard_name = "time"
    time_coord = Coord(time, time_metadata, "T")
    # MODIS times are TAI seconds since 1993-01-01
    time_coord.convert_TAI_time_to_std_time(dt.datetime(1993, 1, 1, 0, 0, 0))

    return CoordList([lat_coord, lon_coord, time_coord])
def _create_coord_list(self, filenames):
    """Build gridded lat/lon/time coordinates for an XDim/YDim (L3) product.

    :param filenames: files to read; the time coordinate is filled with the
        midpoint between each file's start and end dates
    :return: CoordList of longitude (X), latitude (Y) and time (T)
    """
    import numpy as np
    from cis.time_util import calculate_mid_time, cis_standard_time_unit

    variables = ["XDim", "YDim"]
    logging.info("Listing coordinates: " + str(variables))
    sdata, vdata = hdf.read(filenames, variables)

    lat = sdata["YDim"]
    lat_metadata = hdf.read_metadata(lat, "SD")
    lon = sdata["XDim"]
    lon_metadata = hdf.read_metadata(lon, "SD")

    # expand lat and lon data array so that they have the same shape
    lat_data = utils.expand_1d_to_2d_array(
        hdf.read_data(lat, "SD"), lon_metadata.shape, axis=1
    )  # expand latitude column wise
    lon_data = utils.expand_1d_to_2d_array(
        hdf.read_data(lon, "SD"), lat_metadata.shape, axis=0
    )  # expand longitude row wise
    lat_metadata.shape = lat_data.shape
    lon_metadata.shape = lon_data.shape

    # make sure "latitude"/"longitude" (the standard_name) is displayed instead of "YDim"/"XDim"
    lat_metadata.standard_name = "latitude"
    lat_metadata._name = ""
    lon_metadata.standard_name = "longitude"
    lon_metadata._name = ""

    # create arrays for time coordinate using the midpoint of the time delta between the start date and the end date
    time_data_array = []
    for filename in filenames:
        mid_datetime = calculate_mid_time(self._get_start_date(filename), self._get_end_date(filename))
        logging.debug("Using " + str(mid_datetime) + " as datetime for file " + str(filename))
        # Only use part of the full lat shape as it has already been concatenated.
        # Fix: integer division -- '/' yields a float under Python 3, which
        # numpy rejects as an array dimension.
        time_data = np.full(
            (lat_metadata.shape[0] // len(filenames), lat_metadata.shape[1]),
            mid_datetime, dtype="float64"
        )
        time_data_array.append(time_data)
    time_data = utils.concatenate(time_data_array)
    time_metadata = Metadata(
        name="DateTime",
        standard_name="time",
        shape=time_data.shape,
        units=str(cis_standard_time_unit),
        calendar=cis_standard_time_unit.calendar,
    )

    coords = CoordList()
    coords.append(Coord(lon_data, lon_metadata, "X"))
    coords.append(Coord(lat_data, lat_metadata, "Y"))
    coords.append(Coord(time_data, time_metadata, "T"))
    return coords
def hdf_read(filenames, variable, start=None, count=None, stride=None):
    """Returns variable, concatenated over a sequence of files."""
    from cis.data_io.hdf import read
    from cis.data_io.hdf_sd import get_metadata
    from cis.utils import concatenate

    sdata, _ = read(filenames, variable)
    sds_list = sdata[variable]

    # Pull the (windowed) data out of each file's SDS and join them up;
    # metadata is taken from the first file.
    chunks = []
    for sds in sds_list:
        chunks.append(_get_MODIS_SDS_data(sds, start, count, stride))
    return concatenate(chunks), get_metadata(sds_list[0])
def ncdf_read(filenames, variable, start=None, count=None, stride=None):
    """Returns variable, concatenated over a sequence of files."""
    from cis.data_io.netcdf import read, get_metadata
    from cis.utils import concatenate, listify

    pieces = []
    for fname in listify(filenames):
        var = read(fname, variable)[variable]
        pieces.append(_tidy_ncdf_data(var, start, count, stride))

    # Metadata is taken from the variable of the last file read.
    metadata = get_metadata(var)
    return concatenate(pieces), metadata
def create_data_object(self, filenames, variable):
    """Create a QC-masked UngriddedData object for *variable* from CALIOP files.

    The variable is masked using the CAD score and the 532nm extinction QC
    flag; a QC-masked pressure coordinate is appended to the coordinate list.

    :param filenames: files to read
    :param variable: SD variable name to read and QC-mask
    :return: UngriddedData with lat/lon/time(/altitude) plus pressure coords
    """
    from pywork.CALIOP_utils import mask_data

    logging.debug("Creating *QC'd* data object for variable " + variable)

    # reading of variables (plus the fields needed for QC masking)
    sdata, vdata = hdf.read(filenames, [variable, "Pressure", "Extinction_QC_Flag_532", "CAD_Score"])

    # retrieve data + its metadata
    var = sdata[variable]
    metadata = hdf.read_metadata(var, "SD")

    if variable in MIXED_RESOLUTION_VARIABLES:
        logging.warning("Using Level 2 resolution profile for mixed resolution variable {}. See CALIPSO "
                        "documentation for more details".format(variable))
        callback = self._get_mixed_resolution_calipso_data
    else:
        callback = self._get_calipso_data

    var_data = hdf.read_data(sdata[variable], callback)

    extinction_qc = hdf.read_data(sdata["Extinction_QC_Flag_532"], self._get_mixed_resolution_calipso_data)
    cad_score = hdf.read_data(sdata["CAD_Score"], self._get_mixed_resolution_calipso_data)

    qcd_var_data, = mask_data(var_data, cad_score, extinction_qc)

    # reading coordinates
    # Fix: the coordinates were previously also computed earlier (from the
    # metadata shape) and then unconditionally overwritten here -- the dead
    # first computation has been removed; only this selection was ever used.
    if variable.startswith('Column'):
        coords = self._create_one_dimensional_coord_list(filenames, index_offset=1)
    else:
        coords = self._create_coord_list(filenames, index_offset=1)

    pres_data = hdf.read_data(sdata['Pressure'], self._get_calipso_data)
    pres_metadata = hdf.read_metadata(sdata['Pressure'], "SD")
    # Fix badly formatted units which aren't CF compliant and will break if they are aggregated
    if str(pres_metadata.units) == "hPA":
        pres_metadata.units = "hPa"
    # NOTE(review): unlike var_data above, this result is not unpacked with a
    # trailing comma -- confirm mask_data's return convention is consistent.
    qcd_pres_data = mask_data(pres_data, cad_score, extinction_qc)
    pres_coord = Coord(qcd_pres_data, pres_metadata, 'P')
    coords.append(pres_coord)

    return UngriddedData(qcd_var_data, metadata, coords)
def create_data_object(self, filenames, variable):
    """Read *variable* and its coordinates from MODIS files as UngriddedData."""
    logging.debug("Creating data object for variable " + variable)

    # The variable name is needed to decide whether the lat/lon data
    # must be interpolated.
    coords = self._create_coord_list(filenames, variable)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)

    # retrieve data + its metadata
    sds = sdata[variable]
    return UngriddedData(sds, hdf.read_metadata(sds, "SD"), coords)
def create_data_object(self, filenames, variable):
    """Read *variable* from MODIS files, deferring SD reads to the MODIS callback."""
    logging.debug("Creating data object for variable " + variable)

    # The variable name is needed to decide whether the lat/lon data
    # must be interpolated.
    coords = self._create_coord_list(filenames, variable)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)

    # retrieve data + its metadata
    sds = sdata[variable]
    metadata = hdf.read_metadata(sds, "SD")
    return UngriddedData(sds, metadata, coords, _get_MODIS_SDS_data)
def _create_coord_list(self, filenames, variable=None):
    """Build lat/lon/time coordinates for MODIS L2 5km products.

    :param filenames: files to read coordinates from
    :param variable: optional data variable name; 1km-scale variables trigger
        geotiepoints interpolation of the 5km geolocation to the 1km grid
    :return: CoordList of latitude (Y), longitude (X) and time (T)
    """
    import datetime as dt
    from cis.time_util import cis_standard_time_unit
    from cis.utils import concatenate
    from geotiepoints import modis5kmto1km
    # (removed unused imports: convert_time_since_to_std_time, cf_units.Unit)

    variables = ['Latitude', 'Longitude', 'View_time']
    logging.info("Listing coordinates: " + str(variables))
    sdata, vdata = hdf.read(filenames, variables)

    apply_interpolation = False
    if variable is not None:
        scale = self.__get_data_scale(filenames[0], variable)
        # Fix: was 'scale is "1km"' -- identity comparison on strings is not
        # guaranteed to be True even when the values are equal; compare by value.
        apply_interpolation = scale == "1km"

    lat_data = hdf.read_data(sdata['Latitude'], _get_MODIS_SDS_data)
    lat_metadata = hdf.read_metadata(sdata['Latitude'], "SD")
    lon_data = hdf.read_data(sdata['Longitude'], _get_MODIS_SDS_data)
    lon_metadata = hdf.read_metadata(sdata['Longitude'], "SD")

    if apply_interpolation:
        # Interpolate the 5km geolocation up to the 1km data grid
        lon_data, lat_data = modis5kmto1km(lon_data, lat_data)

    lat_coord = Coord(lat_data, lat_metadata, 'Y')
    lon_coord = Coord(lon_data, lon_metadata, 'X')

    time = sdata['View_time']
    time_metadata = hdf.read_metadata(time, "SD")
    # Ensure the standard name is set
    time_metadata.standard_name = 'time'
    time_metadata.units = cis_standard_time_unit

    t_arrays = []
    for f, d in zip(filenames, time):
        time_start = self._get_start_date(f)
        t_data = _get_MODIS_SDS_data(d) / 24.0  # Convert hours since to days since
        t_offset = time_start - dt.datetime(1600, 1, 1)  # Convert to CIS time
        t_arrays.append(t_data + t_offset.days)

    time_coord = Coord(concatenate(t_arrays), time_metadata, "T")

    return CoordList([lat_coord, lon_coord, time_coord])
def _create_coord_list(self, filenames, variable=None):
    """Build lat/lon/time coordinates for MODIS L2 5km products.

    :param filenames: files to read coordinates from
    :param variable: optional data variable name; 1km-scale variables trigger
        geotiepoints interpolation of the 5km geolocation to the 1km grid
    :return: CoordList of latitude (Y), longitude (X) and time (T)
    """
    import datetime as dt
    from cis.time_util import cis_standard_time_unit
    from cis.utils import concatenate
    from geotiepoints import modis5kmto1km
    # (removed unused imports: convert_time_since_to_std_time, cf_units.Unit)

    variables = ['Latitude', 'Longitude', 'View_time']
    logging.info("Listing coordinates: " + str(variables))
    sdata, vdata = hdf.read(filenames, variables)

    apply_interpolation = False
    if variable is not None:
        scale = self.__get_data_scale(filenames[0], variable)
        # Fix: was 'scale is "1km"' -- 'is' compares identity, not value,
        # so equal strings may still yield False; use '=='.
        apply_interpolation = scale == "1km"

    lat_data = hdf.read_data(sdata['Latitude'], _get_MODIS_SDS_data)
    lat_metadata = hdf.read_metadata(sdata['Latitude'], "SD")
    lon_data = hdf.read_data(sdata['Longitude'], _get_MODIS_SDS_data)
    lon_metadata = hdf.read_metadata(sdata['Longitude'], "SD")

    if apply_interpolation:
        # Interpolate the 5km geolocation up to the 1km data grid
        lon_data, lat_data = modis5kmto1km(lon_data, lat_data)

    lat_coord = Coord(lat_data, lat_metadata, 'Y')
    lon_coord = Coord(lon_data, lon_metadata, 'X')

    time = sdata['View_time']
    time_metadata = hdf.read_metadata(time, "SD")
    # Ensure the standard name is set
    time_metadata.standard_name = 'time'
    time_metadata.units = cis_standard_time_unit

    t_arrays = []
    for f, d in zip(filenames, time):
        time_start = self._get_start_date(f)
        t_data = _get_MODIS_SDS_data(d) / 24.0  # Convert hours since to days since
        t_offset = time_start - dt.datetime(1600, 1, 1)  # Convert to CIS time
        t_arrays.append(t_data + t_offset.days)

    time_coord = Coord(concatenate(t_arrays), time_metadata, "T")

    return CoordList([lat_coord, lon_coord, time_coord])
def _create_one_dimensional_coord_list(self, filenames):
    """Build un-expanded (1-d) lat/lon/time coordinates for CloudSat-MODIS files."""
    from cis.time_util import cis_standard_time_unit

    wanted = ['MODIS_latitude', 'MODIS_longitude', 'TAI_start', 'Profile_time']
    logging.info("Listing coordinates: " + str(wanted))
    sdata, vdata = hdf.read(filenames, wanted)

    def _geo_coord(key, standard_name):
        # Read an SD geolocation field and label it with its CF standard name
        sds = sdata[key]
        values = hdf.read_data(sds, self._get_cloudsat_sds_data)
        meta = hdf.read_metadata(sds, "SD")
        meta.shape = values.shape
        meta.standard_name = standard_name
        return Coord(values, meta)

    lat_coord = _geo_coord('MODIS_latitude', 'latitude')
    lon_coord = _geo_coord('MODIS_longitude', 'longitude')

    # time coordinate
    time_values = self._generate_time_array(vdata)
    time_coord = Coord(time_values,
                       Metadata(name='Profile_time', standard_name='time',
                                shape=time_values.shape,
                                units=cis_standard_time_unit),
                       "X")

    coords = CoordList()
    coords.append(lat_coord)
    coords.append(lon_coord)
    coords.append(time_coord)
    return coords
def create_data_object(self, filenames, variable):
    """Read *variable* from MODIS files, trimming the swath edges."""
    logging.debug("Creating data object for variable " + variable)

    # the variable here is needed to work out whether to apply interpolation to the lat/lon data or not
    coords = self._create_coord_list(filenames, variable)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)
    sds = sdata[variable]
    metadata = hdf.read_metadata(sds, "SD")

    # cut off the edges of the data...
    # TODO CHECK THIS IS ACTUALLY VALID BEFORE PUBLISHING ANYTHING WITH THIS
    trimmed = hdf.read_data(sds, _get_MODIS_SDS_data)[:, 2:-2]

    return UngriddedData(trimmed, metadata, coords, _get_MODIS_SDS_data)
def create_data_object(self, filenames, variable):
    """Read a 1-dimensional CloudSat variable as UngriddedData."""
    logging.debug("Creating data object for variable " + variable)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)

    # reading (un-expanded) coordinates, since the data is 1-dimensional
    coords = self._create_one_dimensional_coord_list(filenames)

    # work out which interface (VD or SD) holds the variable
    if variable in vdata:
        source, fmt, reader = vdata[variable], "VD", self._get_cloudsat_vds_data
    elif variable in sdata:
        source, fmt, reader = sdata[variable], "SD", self._get_cloudsat_sds_data
    else:
        raise ValueError("variable not found")

    values = hdf.read_data(source, reader)
    metadata = hdf.read_metadata(source, fmt)
    return UngriddedData(values, metadata, coords)
def create_data_object(self, filenames, variable):
    """Read *variable* from MODIS files, trimming swath edges on the 1km grid."""
    logging.debug("Creating data object for variable " + variable)

    # the variable here is needed to work out whether to apply interpolation to the lat/lon data or not
    coords = self._create_coord_list(filenames, variable)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)
    sds = sdata[variable]
    metadata = hdf.read_metadata(sds, "SD")

    values = hdf.read_data(sds, _get_MODIS_SDS_data)
    # cut off the edges of the data...
    # TODO CHECK THIS IS ACTUALLY VALID BEFORE PUBLISHING ANYTHING WITH THIS
    if self.apply_interpolation:
        values = values[:, 2:-2]

    return UngriddedData(values, metadata, coords, _get_MODIS_SDS_data)
def _create_coord_list(self, filenames, variable=None):
    """Build lat/lon/time coordinates for MODIS files, interpolating 5km
    geolocation (and replicating scan times) onto the 1km grid when the
    requested variable is at 1km scale.

    Records the interpolation decision on self.apply_interpolation for use
    by create_data_object.
    """
    import datetime as dt

    wanted = ['Latitude', 'Longitude', 'Scan_Start_Time']
    logging.info("Listing coordinates: " + str(wanted))
    sdata, vdata = hdf.read(filenames, wanted)

    self.apply_interpolation = False
    if variable is not None:
        self.apply_interpolation = self.__get_data_scale(filenames[0], variable) == "1km"

    lat_sds = sdata['Latitude']
    lat_vals = hdf.read_data(lat_sds, _get_MODIS_SDS_data)
    lon_sds = sdata['Longitude']
    lon_vals = hdf.read_data(lon_sds, _get_MODIS_SDS_data)
    if self.apply_interpolation:
        lon_vals, lat_vals = modis5kmto1km(lon_vals[:], lat_vals[:])

    lat_coord = Coord(lat_vals, hdf.read_metadata(lat_sds, "SD"), 'Y')
    lon_coord = Coord(lon_vals, hdf.read_metadata(lon_sds, "SD"), 'X')

    time_sds = sdata['Scan_Start_Time']
    time_meta = hdf.read_metadata(time_sds, "SD")
    # Ensure the standard name is set
    time_meta.standard_name = 'time'
    time_vals = hdf.read_data(time_sds, _get_MODIS_SDS_data)
    if self.apply_interpolation:
        # replicate each 5km scan time onto the 5x5 block of 1km pixels
        time_vals = np.repeat(np.repeat(time_vals, 5, axis=0), 5, axis=1)
    time_coord = Coord(time_vals, time_meta, "T")
    time_coord.convert_TAI_time_to_std_time(dt.datetime(1993, 1, 1, 0, 0, 0))

    return CoordList([lat_coord, lon_coord, time_coord])
def _create_coord_list(self, filenames, variable=None):
    """Build lat/lon/time coordinates for MODIS files (field-interpolate variant).

    :param filenames: files to read coordinates from
    :param variable: optional data variable name; 1km-scale variables trigger
        __field_interpolate on the 5km geolocation
    :return: CoordList of latitude (Y), longitude (X) and time (T)
    """
    import datetime as dt

    variables = ['Latitude', 'Longitude', 'Scan_Start_Time']
    logging.info("Listing coordinates: " + str(variables))
    sdata, vdata = hdf.read(filenames, variables)

    apply_interpolation = False
    if variable is not None:
        scale = self.__get_data_scale(filenames[0], variable)
        # Fix: was 'scale is "1km"' -- identity comparison on strings; equal
        # values are not guaranteed to be the same object. Compare by value.
        apply_interpolation = scale == "1km"

    lat = sdata['Latitude']
    sd_lat = hdf.read_data(lat, _get_MODIS_SDS_data)
    lat_data = self.__field_interpolate(sd_lat) if apply_interpolation else sd_lat
    lat_metadata = hdf.read_metadata(lat, "SD")
    lat_coord = Coord(lat_data, lat_metadata, 'Y')

    lon = sdata['Longitude']
    if apply_interpolation:
        lon_data = self.__field_interpolate(hdf.read_data(lon, _get_MODIS_SDS_data))
    else:
        lon_data = hdf.read_data(lon, _get_MODIS_SDS_data)
    lon_metadata = hdf.read_metadata(lon, "SD")
    lon_coord = Coord(lon_data, lon_metadata, 'X')

    time = sdata['Scan_Start_Time']
    time_metadata = hdf.read_metadata(time, "SD")
    # Ensure the standard name is set
    time_metadata.standard_name = 'time'
    time_coord = Coord(time, time_metadata, "T", _get_MODIS_SDS_data)
    # MODIS times are TAI seconds since 1993-01-01
    time_coord.convert_TAI_time_to_std_time(dt.datetime(1993, 1, 1, 0, 0, 0))

    return CoordList([lat_coord, lon_coord, time_coord])
def create_data_object(self, filenames, variable):
    """Create UngriddedData for *variable*, splitting extra leading dimensions.

    2-d fields are returned directly; fields with more than two dimensions
    are sliced into one UngriddedData per combination of leading indices and
    returned as an UngriddedDataList. 1-d fields are not supported.
    """
    from itertools import product
    logging.debug("Creating data object for variable " + variable)
    # reading coordinates
    # the variable here is needed to work out whether to apply interpolation to the lat/lon data or not
    coords = self._create_coord_list(filenames, variable)
    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)
    # retrieve data + its metadata
    var = sdata[variable]
    metadata = hdf.read_metadata(var, "SD")
    # Check the dimension of this variable (info() from the first file's SDS)
    _, ndim, dim_len, _, _ = var[0].info()
    if ndim == 2:
        return UngriddedData(var, metadata, coords, _get_MODIS_SDS_data)
    elif ndim < 2:
        raise NotImplementedError("1D field in MODIS L2 data.")
    else:
        result = UngriddedDataList()
        # Iterate over all but the last two dimensions
        ranges = [range(n) for n in dim_len[:-2]]
        for indices in product(*ranges):
            # Restrict each file's SDS manager to a single 2-d slice by
            # setting its private start/count window; the data is read
            # lazily later via _get_MODIS_SDS_data.
            # NOTE(review): all appended UngriddedData objects share the
            # same 'var' managers, whose window is mutated on every pass --
            # presumably each slice is read before the next mutation;
            # confirm before relying on lazy evaluation here.
            for manager in var:
                manager._start = list(indices) + [0, 0]
                manager._count = [1] * len(indices) + manager.info()[2][-2:]
            result.append(UngriddedData(var, metadata, coords.copy(), _get_MODIS_SDS_data))
        return result
def _create_one_dimensional_coord_list(self, filenames):
    """Build un-expanded (1-d) lat/lon/time coordinates for CloudSat-MODIS files."""
    from cis.time_util import cis_standard_time_unit

    # coordinate variables we are interested in
    coord_names = ['MODIS_latitude', 'MODIS_longitude', 'TAI_start', 'Profile_time']
    logging.info("Listing coordinates: " + str(coord_names))
    sdata, vdata = hdf.read(filenames, coord_names)

    # latitude
    lat_sds = sdata['MODIS_latitude']
    lat_values = hdf.read_data(lat_sds, self._get_cloudsat_sds_data)
    lat_meta = hdf.read_metadata(lat_sds, "SD")
    lat_meta.shape = lat_values.shape
    lat_meta.standard_name = 'latitude'

    # longitude
    lon_sds = sdata['MODIS_longitude']
    lon_values = hdf.read_data(lon_sds, self._get_cloudsat_sds_data)
    lon_meta = hdf.read_metadata(lon_sds, "SD")
    lon_meta.shape = lon_values.shape
    lon_meta.standard_name = 'longitude'

    # time coordinate
    time_values = self._generate_time_array(vdata)
    time_meta = Metadata(name='Profile_time', standard_name='time',
                         shape=time_values.shape, units=cis_standard_time_unit)

    coords = CoordList()
    coords.append(Coord(lat_values, lat_meta))
    coords.append(Coord(lon_values, lon_meta))
    coords.append(Coord(time_values, time_meta, "X"))
    return coords
def _create_coord_list(self, filenames, variable=None):
    """Build lat/lon/time coordinates for MODIS files (geotiepoints variant).

    :param filenames: files to read coordinates from
    :param variable: optional data variable name; 1km-scale variables trigger
        interpolation of the 5km geolocation (and replication of scan times)
        onto the 1km grid
    :return: CoordList of latitude (Y), longitude (X) and time (T)
    """
    import datetime as dt

    variables = ['Latitude', 'Longitude', 'Scan_Start_Time']
    logging.info("Listing coordinates: " + str(variables))
    sdata, vdata = hdf.read(filenames, variables)

    apply_interpolation = False
    if variable is not None:
        scale = self.__get_data_scale(filenames[0], variable)
        # Fix: was 'scale is "1km"' -- identity comparison on strings may be
        # False even for equal values; compare with '=='.
        apply_interpolation = scale == "1km"

    lat = sdata['Latitude']
    lat_data = hdf.read_data(lat, _get_MODIS_SDS_data)
    lon = sdata['Longitude']
    lon_data = hdf.read_data(lon, _get_MODIS_SDS_data)
    if apply_interpolation:
        lon_data, lat_data = modis5kmto1km(lon_data[:], lat_data[:])
    lat_metadata = hdf.read_metadata(lat, "SD")
    lat_coord = Coord(lat_data, lat_metadata, 'Y')
    lon_metadata = hdf.read_metadata(lon, "SD")
    lon_coord = Coord(lon_data, lon_metadata, 'X')

    time = sdata['Scan_Start_Time']
    time_metadata = hdf.read_metadata(time, "SD")
    # Ensure the standard name is set
    time_metadata.standard_name = 'time'
    time_data = hdf.read_data(time, _get_MODIS_SDS_data)
    # Fix: only replicate the 5km scan times when lat/lon were interpolated to
    # the 1km grid; the previous unconditional np.repeat left the time
    # coordinate with a different shape from lat/lon for 5km variables
    # (the sibling implementation guards this the same way).
    if apply_interpolation:
        time_data = np.repeat(np.repeat(time_data, 5, axis=0), 5, axis=1)
    time_coord = Coord(time_data, time_metadata, "T")
    time_coord.convert_TAI_time_to_std_time(dt.datetime(1993, 1, 1, 0, 0, 0))

    return CoordList([lat_coord, lon_coord, time_coord])
def create_data_object(self, filenames, variable):
    """Read a CALIPSO variable as UngriddedData."""
    logging.debug("Creating data object for variable " + variable)

    # reading coordinates: 'Column' variables are one-dimensional
    if variable.startswith('Column'):
        coords = self._create_one_dimensional_coord_list(filenames, index_offset=1)
    else:
        coords = self._create_coord_list(filenames, index_offset=1)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)

    # retrieve data + its metadata
    sds = sdata[variable]
    metadata = hdf.read_metadata(sds, "SD")

    # pick the reader matching the variable's resolution profile
    if variable in MIXED_RESOLUTION_VARIABLES:
        logging.warning("Using Level 2 resolution profile for mixed resolution variable {}. See CALIPSO "
                        "documentation for more details".format(variable))
        callback = self._get_mixed_resolution_calipso_data
    else:
        callback = self._get_calipso_data

    return UngriddedData(sds, metadata, coords, callback)
def _create_coord_list(self, filenames):
    """Build lat/lon/(height)/time coordinates for CloudSat files using the
    CloudSat-specific SD/VD read callbacks.

    If the product has no 'Height' field the height coordinate is omitted and
    the remaining coordinates stay one-dimensional.
    """
    from cis.time_util import cis_standard_time_unit

    coord_vars = ['Latitude', 'Longitude', 'TAI_start', 'Profile_time', 'Height']

    try:
        logging.info("Listing coordinates: " + str(coord_vars))
        sdata, vdata = hdf.read(filenames, coord_vars)

        # altitude coordinate
        height_sds = sdata['Height']
        height_values = hdf.read_data(height_sds, self._get_cloudsat_sds_data)
        height_coord = Coord(height_values, hdf.read_metadata(height_sds, "SD"), "Y")
    except InvalidVariableError:
        # This means we are reading a Cloudsat file without height, so remove height from the variables list
        coord_vars.remove('Height')
        logging.info("Listing coordinates: " + str(coord_vars))
        sdata, vdata = hdf.read(filenames, coord_vars)
        height_values = None
        height_coord = None

    have_height = height_values is not None

    # latitude
    lat_vd = vdata['Latitude']
    lat_values = hdf.read_data(lat_vd, self._get_cloudsat_vds_data)
    if have_height:
        lat_values = utils.expand_1d_to_2d_array(lat_values, len(height_values[0]), axis=1)
    lat_meta = hdf.read_metadata(lat_vd, "VD")
    lat_meta.shape = lat_values.shape

    # longitude
    lon_vd = vdata['Longitude']
    lon_values = hdf.read_data(lon_vd, self._get_cloudsat_vds_data)
    if have_height:
        lon_values = utils.expand_1d_to_2d_array(lon_values, len(height_values[0]), axis=1)
    lon_meta = hdf.read_metadata(lon_vd, "VD")
    lon_meta.shape = lon_values.shape

    # time coordinate
    time_values = self._generate_time_array(vdata)
    if have_height:
        time_values = utils.expand_1d_to_2d_array(time_values, len(height_values[0]), axis=1)
    time_meta = Metadata(name='Profile_time', standard_name='time',
                         shape=time_values.shape, units=cis_standard_time_unit)

    # assemble the coordinate list
    coords = CoordList()
    coords.append(Coord(lat_values, lat_meta))
    coords.append(Coord(lon_values, lon_meta))
    if height_coord is not None:
        coords.append(height_coord)
    coords.append(Coord(time_values, time_meta, "X"))
    return coords