def _add_aux_coordinate(dim_coords, filename, aux_coord_name, length):
    """
    Add an auxiliary coordinate to a list of (reshaped) dimension coordinates

    :param dim_coords: CoordList of one-dimensional coordinates representing physical dimensions
    :param filename: The data file containing the aux coord
    :param aux_coord_name: The name of the aux coord to add to the coord list
    :param length: The length of the data dimensions which this auxiliary coordinate should span
    :return: A CoordList of reshaped (2D) physical coordinates plus the 2D auxiliary coordinate
    """
    from cis.data_io.Coord import Coord
    from cis.utils import expand_1d_to_2d_array
    from cis.data_io.netcdf import read, get_metadata

    # We assume that the auxiliary coordinate is the same shape across files
    d = read(filename, [aux_coord_name])[aux_coord_name]

    # Reshape to the length given
    aux_data = expand_1d_to_2d_array(d[:], length, axis=0)

    # Get the length of the auxiliary coordinate
    len_y = d[:].size

    for dim_coord in dim_coords:
        dim_coord.data = expand_1d_to_2d_array(dim_coord.data, len_y, axis=1)

    all_coords = dim_coords + [Coord(aux_data, get_metadata(d))]

    return all_coords
def _create_coord_list(self, filenames):
    from cis.time_util import cis_standard_time_unit

    # list of coordinate variables we are interested in
    variables = ['Latitude', 'Longitude', 'TAI_start', 'Profile_time', 'Height']

    # reading the various files
    try:
        logging.info("Listing coordinates: " + str(variables))
        sdata, vdata = hdf.read(filenames, variables)

        # altitude coordinate
        height = sdata['Height']
        height_data = hdf.read_data(height, "SD")
        height_metadata = hdf.read_metadata(height, "SD")
        height_coord = Coord(height_data, height_metadata, "Y")

    except InvalidVariableError:
        # This means we are reading a Cloudsat file without height, so remove height from the variables list
        variables.remove('Height')
        logging.info("Listing coordinates: " + str(variables))
        sdata, vdata = hdf.read(filenames, variables)
        height_data = None
        height_coord = None

    # latitude
    lat = vdata['Latitude']
    lat_data = hdf.read_data(lat, "VD")
    if height_data is not None:
        lat_data = utils.expand_1d_to_2d_array(lat_data, len(height_data[0]), axis=1)
    lat_metadata = hdf.read_metadata(lat, "VD")
    lat_metadata.shape = lat_data.shape
    lat_coord = Coord(lat_data, lat_metadata)

    # longitude
    lon = vdata['Longitude']
    lon_data = hdf.read_data(lon, "VD")
    if height_data is not None:
        lon_data = utils.expand_1d_to_2d_array(lon_data, len(height_data[0]), axis=1)
    lon_metadata = hdf.read_metadata(lon, "VD")
    lon_metadata.shape = lon_data.shape
    lon_coord = Coord(lon_data, lon_metadata)

    # time coordinate
    time_data = self._generate_time_array(vdata)
    if height_data is not None:
        time_data = utils.expand_1d_to_2d_array(time_data, len(height_data[0]), axis=1)
    time_coord = Coord(time_data, Metadata(name='Profile_time', standard_name='time', shape=time_data.shape,
                                           units=str(cis_standard_time_unit),
                                           calendar=cis_standard_time_unit.calendar), "X")

    # create object containing list of coordinates
    coords = CoordList()
    coords.append(lat_coord)
    coords.append(lon_coord)
    if height_coord is not None:
        coords.append(height_coord)
    coords.append(time_coord)

    return coords
def _create_coord_list(self, filename):
    import numpy as np

    coords = CoordList()

    time_data = read(filename, 'time')['time']
    len_x = time_data.shape[0]

    try:
        alt_data = read(filename, 'altitude')['altitude']
    except InvalidVariableError:
        alt_data = read(filename, 'range')['range']
    len_y = alt_data.shape[0]

    time_arr = utils.expand_1d_to_2d_array(time_data[:], len_y, axis=1)
    t_coord = Coord(time_arr, get_metadata(time_data), axis='x')
    t_coord.convert_to_std_time()
    coords.append(t_coord)

    alt_arr = utils.expand_1d_to_2d_array(alt_data[:], len_x, axis=0)
    coords.append(Coord(alt_arr, get_metadata(alt_data), axis='y'))

    lat_data = read(filename, 'latitude')['latitude']
    lat_arr = np.ones(alt_arr.shape) * lat_data[:]
    coords.append(Coord(lat_arr, get_metadata(lat_data)))

    lon_data = read(filename, 'longitude')['longitude']
    lon_arr = np.ones(alt_arr.shape) * lon_data[:]
    coords.append(Coord(lon_arr, get_metadata(lon_data)))

    return coords
def _create_coord_list(self, filenames):
    import numpy as np
    from cis.time_util import calculate_mid_time, cis_standard_time_unit

    variables = ["XDim", "YDim"]
    logging.info("Listing coordinates: " + str(variables))

    sdata, vdata = hdf.read(filenames, variables)

    lat = sdata["YDim"]
    lat_metadata = hdf.read_metadata(lat, "SD")
    lon = sdata["XDim"]
    lon_metadata = hdf.read_metadata(lon, "SD")

    # expand the lat and lon data arrays so that they have the same shape
    lat_data = utils.expand_1d_to_2d_array(hdf.read_data(lat, "SD"),
                                           lon_metadata.shape, axis=1)  # expand latitude column wise
    lon_data = utils.expand_1d_to_2d_array(hdf.read_data(lon, "SD"),
                                           lat_metadata.shape, axis=0)  # expand longitude row wise
    lat_metadata.shape = lat_data.shape
    lon_metadata.shape = lon_data.shape

    # make sure the standard_names "latitude" and "longitude" are displayed instead of "YDim" and "XDim"
    lat_metadata.standard_name = "latitude"
    lat_metadata._name = ""
    lon_metadata.standard_name = "longitude"
    lon_metadata._name = ""

    # create arrays for the time coordinate using the midpoint between the start date and the end date
    time_data_array = []
    for filename in filenames:
        mid_datetime = calculate_mid_time(self._get_start_date(filename), self._get_end_date(filename))
        logging.debug("Using " + str(mid_datetime) + " as datetime for file " + str(filename))
        # Only use part of the full lat shape as it has already been concatenated
        # (integer division so that np.empty receives an integral shape)
        time_data = np.empty((lat_metadata.shape[0] // len(filenames), lat_metadata.shape[1]), dtype="float64")
        time_data.fill(mid_datetime)
        time_data_array.append(time_data)
    time_data = utils.concatenate(time_data_array)

    time_metadata = Metadata(name="DateTime", standard_name="time", shape=time_data.shape,
                             units=str(cis_standard_time_unit), calendar=cis_standard_time_unit.calendar)

    coords = CoordList()
    coords.append(Coord(lon_data, lon_metadata, "X"))
    coords.append(Coord(lat_data, lat_metadata, "Y"))
    coords.append(Coord(time_data, time_metadata, "T"))

    return coords
def make_mock_CALIOP_data(data, name='', units=''):
    import numpy as np
    from cis.data_io.Coord import Coord
    from cis.data_io.ungridded_data import Metadata, UngriddedData
    from cis.utils import expand_1d_to_2d_array

    vertical_levels, swath_length = data.shape

    lat = Coord(expand_1d_to_2d_array(np.arange(swath_length), vertical_levels),
                Metadata(standard_name='latitude'))
    lon = Coord(expand_1d_to_2d_array(np.arange(swath_length), vertical_levels),
                Metadata(standard_name='longitude'))
    alt = Coord(expand_1d_to_2d_array(np.arange(vertical_levels), swath_length, axis=1),
                Metadata(standard_name='altitude'))

    return UngriddedData(data, Metadata(name, units=units), [lat, lon, alt])
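# Illustrative usage of the mock helper above. This is only a sketch: the variable
# name and units below are made up for the example and are not taken from any CIS test.
import numpy as np

# 10 vertical levels by 5 profiles along the swath
mock_data = np.arange(50, dtype=float).reshape(10, 5)
mock_ungridded = make_mock_CALIOP_data(mock_data, name='backscatter', units='km-1 sr-1')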
def _create_coord_list(self, filename):
    import numpy as np

    coords = CoordList()

    time_data = read(filename, 'time')['time']

    try:
        alt_data = read(filename, 'altitude')['altitude']
    except InvalidVariableError:
        alt_data = read(filename, 'range')['range']
    len_y = alt_data.shape[1]

    time_arr = utils.expand_1d_to_2d_array(time_data[:], len_y, axis=1)
    t_coord = Coord(time_arr, get_metadata(time_data), axis='x')
    t_coord.convert_to_std_time()
    coords.append(t_coord)

    # alt_arr = utils.expand_1d_to_2d_array(alt_data[:], len_x, axis=0)
    alt_arr = alt_data[:, :, 0]  # eliminate "angle" axis
    # alt_arr = alt_data  # eliminate "angle" axis
    coords.append(Coord(alt_arr, get_metadata(alt_data), axis='y'))

    lat_data = read(filename, 'latitude')['latitude']
    lat_arr = np.ones(alt_arr.shape) * lat_data[:]
    coords.append(Coord(lat_arr, get_metadata(lat_data)))

    lon_data = read(filename, 'longitude')['longitude']
    lon_arr = np.ones(alt_arr.shape) * lon_data[:]
    coords.append(Coord(lon_arr, get_metadata(lon_data)))

    return coords
def create_data_object(self, filenames, variable):
    logging.debug("Creating data object for variable " + variable)

    # reading coordinates
    coords = self._create_coord_list(filenames)

    # reading of variables
    sdata, vdata = hdf.read(filenames, variable)

    # missing values
    missing_values = [0, -9999, -4444, -3333]

    # retrieve data + its metadata
    if variable in vdata:
        # vdata should be expanded in the same way as the coordinates are expanded
        try:
            height_length = coords.get_coord('Height').shape[1]
            var = utils.expand_1d_to_2d_array(hdf.read_data(vdata[variable], "VD", missing_values),
                                              height_length, axis=1)
        except CoordinateNotFoundError:
            var = hdf.read_data(vdata[variable], "VD", missing_values)
        metadata = hdf.read_metadata(vdata[variable], "VD")
    elif variable in sdata:
        var = hdf.read_data(sdata[variable], "SD", missing_values)
        metadata = hdf.read_metadata(sdata[variable], "SD")
    else:
        raise ValueError("variable not found")

    return UngriddedData(var, metadata, coords)
def test_changing_an_element_of_expanded_array_raises_error(self):
    import numpy as np
    from numpy.testing import assert_raises
    from cis.utils import expand_1d_to_2d_array

    a = np.array([1, 2, 3, 4])
    b = expand_1d_to_2d_array(a, 5, axis=0)

    # The expanded array should behave as a read-only view, so item assignment should raise
    with assert_raises(ValueError):
        b[1, 4] = 42
def test_can_expand_1d_array_down(self):
    import numpy as np
    from cis.utils import expand_1d_to_2d_array

    a = np.array([1, 2, 3, 4])
    b = expand_1d_to_2d_array(a, 4, axis=1)
    ref = np.array([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3], [4, 4, 4, 4]])

    assert np.equal(b, ref).all()
def test_can_expand_1d_array_across(self):
    import numpy as np
    from cis.utils import expand_1d_to_2d_array

    a = np.array([1, 2, 3, 4])
    b = expand_1d_to_2d_array(a, 4, axis=0)
    ref = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])

    assert np.equal(b, ref).all()
def test_can_expand_1d_array_down(self):
    import numpy as np
    from cis.utils import expand_1d_to_2d_array

    a = np.array([1, 2, 3, 4])
    b = expand_1d_to_2d_array(a, 5, axis=1)
    ref = np.array([[1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]])

    assert np.equal(b, ref).all()
def test_can_expand_1d_array_across(self):
    import numpy as np
    from cis.utils import expand_1d_to_2d_array

    a = np.array([1, 2, 3, 4])
    b = expand_1d_to_2d_array(a, 5, axis=0)
    ref = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])

    assert np.equal(b, ref).all()
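# For reference, a minimal sketch of an expansion helper that is consistent with the
# tests above, built on numpy broadcasting. This is an assumption-based illustration,
# not necessarily the actual cis.utils.expand_1d_to_2d_array implementation.
import numpy as np


def expand_1d_to_2d_array_sketch(array_1d, length, axis=0):
    """Repeat a 1D array into a 2D array along the given axis.

    axis=0 repeats the whole array as rows, giving shape (length, array_1d.size);
    axis=1 repeats each element along its row, giving shape (array_1d.size, length).
    The result is a read-only broadcast view, so item assignment raises an error.
    """
    if axis == 0:
        return np.broadcast_to(array_1d, (length, array_1d.size))
    return np.broadcast_to(array_1d.reshape(-1, 1), (array_1d.size, length))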
def _create_coord_list(self, filenames, index_offset=0):
    import logging
    from cis.data_io import hdf as hdf
    from cis.data_io.Coord import Coord, CoordList
    from cis.data_io.ungridded_data import Metadata
    import cis.utils as utils
    from cis.data_io.hdf_vd import VDS, get_data
    from pyhdf.error import HDF4Error
    from cis.data_io import hdf_sd
    import datetime as dt
    from cis.time_util import convert_sec_since_to_std_time, cis_standard_time_unit

    variables = ['Latitude', 'Longitude', "Profile_Time", "Pressure"]
    logging.info("Listing coordinates: " + str(variables))

    # reading data from files
    sdata = {}
    for filename in filenames:
        try:
            sds_dict = hdf_sd.read(filename, variables)
        except HDF4Error as e:
            raise IOError(str(e))

        for var in list(sds_dict.keys()):
            utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

    alt_name = "altitude"
    logging.info("Additional coordinates: '" + alt_name + "'")

    # work out the size of the data arrays; the coordinate variables will be reshaped to match it.
    # NOTE: This assumes that all Caliop_L1 files have the same altitudes. If this is not the case,
    #       then the following line will need to be changed to concatenate the data from all the
    #       files and not just arbitrarily pick the altitudes from the first file.
    alt_data = get_data(VDS(filenames[0], "Lidar_Data_Altitudes"), True)
    alt_data *= 1000.0  # Convert to m
    len_x = alt_data.shape[0]

    lat_data = hdf.read_data(sdata['Latitude'], self._get_calipso_data)
    len_y = lat_data.shape[0]

    new_shape = (len_x, len_y)

    # altitude
    alt_data = utils.expand_1d_to_2d_array(alt_data, len_y, axis=0)
    alt_metadata = Metadata(name=alt_name, standard_name=alt_name, shape=new_shape)
    alt_coord = Coord(alt_data, alt_metadata)

    # pressure
    if self.include_pressure:
        pres_data = hdf.read_data(sdata['Pressure'], self._get_calipso_data)
        pres_metadata = hdf.read_metadata(sdata['Pressure'], "SD")
        # Fix badly formatted units which aren't CF compliant and will break if they are aggregated
        if str(pres_metadata.units) == "hPA":
            pres_metadata.units = "hPa"
        pres_metadata.shape = new_shape
        pres_coord = Coord(pres_data, pres_metadata, 'P')

    # latitude
    lat_data = utils.expand_1d_to_2d_array(lat_data[:, index_offset], len_x, axis=1)
    lat_metadata = hdf.read_metadata(sdata['Latitude'], "SD")
    lat_metadata.shape = new_shape
    lat_coord = Coord(lat_data, lat_metadata, 'Y')

    # longitude
    lon = sdata['Longitude']
    lon_data = hdf.read_data(lon, self._get_calipso_data)
    lon_data = utils.expand_1d_to_2d_array(lon_data[:, index_offset], len_x, axis=1)
    lon_metadata = hdf.read_metadata(lon, "SD")
    lon_metadata.shape = new_shape
    lon_coord = Coord(lon_data, lon_metadata, 'X')

    # profile time, x
    time = sdata['Profile_Time']
    time_data = hdf.read_data(time, self._get_calipso_data)
    time_data = convert_sec_since_to_std_time(time_data, dt.datetime(1993, 1, 1, 0, 0, 0))
    time_data = utils.expand_1d_to_2d_array(time_data[:, index_offset], len_x, axis=1)
    time_coord = Coord(time_data, Metadata(name='Profile_Time', standard_name='time', shape=time_data.shape,
                                           units=cis_standard_time_unit), "T")

    # create the object containing all coordinates
    coords = CoordList()
    coords.append(lat_coord)
    coords.append(lon_coord)
    coords.append(time_coord)
    coords.append(alt_coord)
    if self.include_pressure and (pres_data.shape == alt_data.shape):
        # For MODIS L1 this may not be true, so skip the air pressure reading. If required for MODIS L1 then
        # some kind of interpolation of the air pressure would be required, as it is on a different (smaller)
        # grid than the Lidar_Data_Altitudes.
        coords.append(pres_coord)

    return coords
def _create_coord_list(self, filenames):
    from cis.time_util import cis_standard_time_unit

    # list of coordinate variables we are interested in
    variables = ['Latitude', 'Longitude', 'TAI_start', 'Profile_time', 'Height']

    # reading the various files
    try:
        logging.info("Listing coordinates: " + str(variables))
        sdata, vdata = hdf.read(filenames, variables)

        # altitude coordinate
        height = sdata['Height']
        height_data = hdf.read_data(height, self._get_cloudsat_sds_data)
        height_metadata = hdf.read_metadata(height, "SD")
        height_coord = Coord(height_data, height_metadata, "Y")

    except InvalidVariableError:
        # This means we are reading a Cloudsat file without height, so remove height from the variables list
        variables.remove('Height')
        logging.info("Listing coordinates: " + str(variables))
        sdata, vdata = hdf.read(filenames, variables)
        height_data = None
        height_coord = None

    # latitude
    lat = vdata['Latitude']
    lat_data = hdf.read_data(lat, self._get_cloudsat_vds_data)
    if height_data is not None:
        lat_data = utils.expand_1d_to_2d_array(lat_data, len(height_data[0]), axis=1)
    lat_metadata = hdf.read_metadata(lat, "VD")
    lat_metadata.shape = lat_data.shape
    lat_coord = Coord(lat_data, lat_metadata)

    # longitude
    lon = vdata['Longitude']
    lon_data = hdf.read_data(lon, self._get_cloudsat_vds_data)
    if height_data is not None:
        lon_data = utils.expand_1d_to_2d_array(lon_data, len(height_data[0]), axis=1)
    lon_metadata = hdf.read_metadata(lon, "VD")
    lon_metadata.shape = lon_data.shape
    lon_coord = Coord(lon_data, lon_metadata)

    # time coordinate
    time_data = self._generate_time_array(vdata)
    if height_data is not None:
        time_data = utils.expand_1d_to_2d_array(time_data, len(height_data[0]), axis=1)
    time_coord = Coord(time_data, Metadata(name='Profile_time', standard_name='time', shape=time_data.shape,
                                           units=cis_standard_time_unit), "X")

    # create object containing list of coordinates
    coords = CoordList()
    coords.append(lat_coord)
    coords.append(lon_coord)
    if height_coord is not None:
        coords.append(height_coord)
    coords.append(time_coord)

    return coords