Example #1
def test_that_can_convert_tai_to_datetime_obj():
    import datetime as dt
    import numpy as np
    from nose.tools import eq_
    from numpy.testing import assert_almost_equal
    from cis.time_util import convert_sec_since_to_std_time_array

    sec = 1.0 / (24.0 * 60.0 * 60.0)  # one second expressed as a fraction of a day
    days_since_standard_epoch = 143541.0  # days from 1600-01-01 (CIS standard epoch) to 1993-01-01

    a = np.arange(6).reshape(2, 3)
    b = convert_sec_since_to_std_time_array(a, dt.datetime(1993, 1, 1))
    eq_(a.shape, b.shape)
    assert_almost_equal(b[0][0], days_since_standard_epoch)
    assert_almost_equal(b[0][1], days_since_standard_epoch + 1 * sec)
    assert_almost_equal(b[0][2], days_since_standard_epoch + 2 * sec)
    assert_almost_equal(b[1][0], days_since_standard_epoch + 3 * sec)
    assert_almost_equal(b[1][1], days_since_standard_epoch + 4 * sec)
    assert_almost_equal(b[1][2], days_since_standard_epoch + 5 * sec)
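
The conversion under test is just an epoch shift plus a change of unit: the reference datetime is expressed as days since the CIS standard epoch, and the seconds offsets are scaled by 1/86400. Below is a minimal sketch of that arithmetic, assuming the standard epoch of 1600-01-01 used by cis_standard_time_unit; the real function may also handle masked arrays and fill values, which this sketch ignores.

import datetime as dt
import numpy as np

def sec_since_to_std_days(sec_array, ref):
    # Sketch only: seconds since `ref` -> days since 1600-01-01 (assumed CIS standard epoch).
    ref_days = (ref - dt.datetime(1600, 1, 1)).total_seconds() / 86400.0
    return ref_days + np.asarray(sec_array, dtype=float) / 86400.0

# sec_since_to_std_days(np.arange(6).reshape(2, 3), dt.datetime(1993, 1, 1))
# reproduces the values asserted above, starting at 143541.0.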
Example #2
    def _generate_time_array(self, vdata):
        """Build one continuous time array from per-granule Profile_time and TAI_start VDs."""
        import cis.data_io.hdf_vd as hdf_vd
        import datetime as dt
        from cis import utils
        from cis.time_util import convert_sec_since_to_std_time_array

        Cloudsat_start_time = dt.datetime(1993, 1, 1, 0, 0, 0)

        arrays = []
        for i, j in zip(vdata['Profile_time'], vdata['TAI_start']):
            time = hdf_vd.get_data(i)
            start = hdf_vd.get_data(j)
            # Profile_time is seconds since the granule start, and TAI_start is the
            # granule start in seconds since 1993-01-01, so the two simply add.
            time += start
            # Do the conversion to standard time here before we expand the time array...
            time = convert_sec_since_to_std_time_array(time, Cloudsat_start_time)
            arrays.append(time)
        return utils.concatenate(arrays)
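
The same offset-then-convert pattern can be exercised without any HDF files. Here is a hypothetical stand-in for one loop iteration; tai_start and the per-profile offsets are invented values for illustration only.

import datetime as dt
import numpy as np
from cis.time_util import convert_sec_since_to_std_time_array

tai_start = 662774400.0                     # hypothetical granule start, seconds since 1993-01-01
profile_time = np.array([0.0, 0.16, 0.32])  # hypothetical per-profile offsets, seconds
std_time = convert_sec_since_to_std_time_array(profile_time + tai_start,
                                               dt.datetime(1993, 1, 1))
# std_time is now in days since the CIS standard epoch, ready to be
# concatenated with the arrays from other granules.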
Example #3
    def _create_coord_list(self, filenames, index_offset=0):
        import logging
        import datetime as dt
        from pyhdf.error import HDF4Error
        from cis import utils
        from cis.data_io import hdf, hdf_sd
        from cis.data_io.Coord import Coord, CoordList
        from cis.data_io.hdf_vd import get_data, VDS
        from cis.data_io.ungridded_data import Metadata
        from cis.time_util import convert_sec_since_to_std_time_array, cis_standard_time_unit

        variables = ['Latitude', 'Longitude', 'Profile_Time', 'Pressure']
        logging.info("Listing coordinates: " + str(variables))

        # reading data from files
        sdata = {}
        for filename in filenames:
            try:
                sds_dict = hdf_sd.read(filename, variables)
            except HDF4Error as e:
                raise IOError(str(e))

            for var in sds_dict.keys():
                utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

        alt_name = "altitude"
        logging.info("Additional coordinates: '" + alt_name + "'")

        # work out size of data arrays
        # the coordinate variables will be reshaped to match that.
        # NOTE: This assumes that all Caliop_L1 files have the same altitudes.
        #       If this is not the case, then the following line will need to be changed
        #       to concatenate the data from all the files and not just arbitrarily pick
        #       the altitudes from the first file.
        alt_data = get_data(VDS(filenames[0], "Lidar_Data_Altitudes"), True)
        alt_data *= 1000.0  # Convert to m
        len_x = alt_data.shape[0]

        lat_data = hdf.read_data(sdata['Latitude'], "SD")
        len_y = lat_data.shape[0]

        new_shape = (len_x, len_y)

        # altitude
        alt_data = utils.expand_1d_to_2d_array(alt_data, len_y, axis=0)
        alt_metadata = Metadata(name=alt_name, standard_name=alt_name, shape=new_shape)
        alt_coord = Coord(alt_data, alt_metadata)

        # pressure
        pres_data = hdf.read_data(sdata['Pressure'], "SD")
        pres_metadata = hdf.read_metadata(sdata['Pressure'], "SD")
        # Fix badly formatted units which aren't CF compliant and will break if they are aggregated
        if pres_metadata.units == "hPA":
            pres_metadata.units = "hPa"
        pres_metadata.shape = new_shape
        pres_coord = Coord(pres_data, pres_metadata, 'P')

        # latitude
        lat_data = utils.expand_1d_to_2d_array(lat_data[:, index_offset], len_x, axis=1)
        lat_metadata = hdf.read_metadata(sdata['Latitude'], "SD")
        lat_metadata.shape = new_shape
        lat_coord = Coord(lat_data, lat_metadata, 'Y')

        # longitude
        lon = sdata['Longitude']
        lon_data = hdf.read_data(lon, "SD")
        lon_data = utils.expand_1d_to_2d_array(lon_data[:, index_offset], len_x, axis=1)
        lon_metadata = hdf.read_metadata(lon, "SD")
        lon_metadata.shape = new_shape
        lon_coord = Coord(lon_data, lon_metadata, 'X')

        # profile time, x
        time = sdata['Profile_Time']
        time_data = hdf.read_data(time, "SD")
        time_data = convert_sec_since_to_std_time_array(time_data, dt.datetime(1993, 1, 1, 0, 0, 0))
        time_data = utils.expand_1d_to_2d_array(time_data[:, index_offset], len_x, axis=1)
        time_coord = Coord(time_data, Metadata(name='Profile_Time', standard_name='time', shape=time_data.shape,
                                               units=str(cis_standard_time_unit),
                                               calendar=cis_standard_time_unit.calendar), "T")

        # create the object containing all coordinates
        coords = CoordList()
        coords.append(lat_coord)
        coords.append(lon_coord)
        coords.append(time_coord)
        coords.append(alt_coord)
        if pres_data.shape == alt_data.shape:
            # For MODIS L1 this may not be true, in which case the air pressure reading is
            # skipped. If it were required for MODIS L1, some kind of interpolation of the
            # air pressure would be needed, as it is on a different (smaller) grid than the
            # Lidar_Data_Altitudes.
            coords.append(pres_coord)

        return coords
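
For reference, here is a hypothetical, NumPy-only stand-in for utils.expand_1d_to_2d_array, with behaviour inferred from how it is called above (axis names the axis along which the 1-D data runs; the real CIS helper may use stride tricks rather than copying):

import numpy as np

def expand_1d_to_2d_array(array_1d, length, axis=0):
    # Hypothetical sketch: lay `array_1d` along `axis` and repeat it
    # `length` times along the other axis.
    a = np.asarray(array_1d)
    if axis == 0:
        return np.tile(a[:, np.newaxis], (1, length))  # shape (a.size, length)
    return np.tile(a, (length, 1))                     # shape (length, a.size)

With this reading, the altitude array (length len_x) expands to (len_x, len_y) with axis=0, and the latitude, longitude, and time slices (length len_y) expand to (len_x, len_y) with axis=1, matching new_shape.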
Example #4
File: Coord.py Project: cpaulik/cis
    def convert_TAI_time_to_std_time(self, ref):
        """Convert this coordinate's data from seconds since `ref` (a TAI-style
        epoch) to the CIS standard time unit, updating units and calendar."""
        from cis.time_util import convert_sec_since_to_std_time_array, cis_standard_time_unit
        self._data = convert_sec_since_to_std_time_array(self.data, ref)
        self.units = str(cis_standard_time_unit)
        self.metadata.calendar = cis_standard_time_unit.calendar
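
Typical usage, with a hypothetical time_coord whose data holds TAI seconds since 1993-01-01 (as for the CloudSat and CALIOP examples above):

import datetime as dt

# `time_coord` is a hypothetical Coord holding seconds since 1993-01-01.
time_coord.convert_TAI_time_to_std_time(dt.datetime(1993, 1, 1))
# Its data is now days since the CIS standard epoch, and its units and
# calendar are taken from cis_standard_time_unit.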