Example #1
    def _create_cube(self, filenames, variable):
        import numpy as np
        from cis.data_io.hdf import _read_hdf4
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord, AuxCoord
        from cis.time_util import calculate_mid_time, cis_standard_time_unit
        from cis.data_io.hdf_sd import get_metadata
        from cf_units import Unit

        variables = ['XDim', 'YDim', variable]
        logging.info("Listing coordinates: " + str(variables))

        cube_list = CubeList()
        # Read each file individually, let Iris do the merging at the end.
        for f in filenames:
            sdata, vdata = _read_hdf4(f, variables)

            lat_coord = DimCoord(_get_MODIS_SDS_data(sdata['YDim']),
                                 standard_name='latitude',
                                 units='degrees')
            lon_coord = DimCoord(_get_MODIS_SDS_data(sdata['XDim']),
                                 standard_name='longitude',
                                 units='degrees')

            # create time coordinate using the midpoint of the time delta between the start date and the end date
            start_datetime = self._get_start_date(f)
            end_datetime = self._get_end_date(f)
            mid_datetime = calculate_mid_time(start_datetime, end_datetime)
            logging.debug("Using {} as datetime for file {}".format(
                mid_datetime, f))
            time_coord = AuxCoord(mid_datetime,
                                  standard_name='time',
                                  units=cis_standard_time_unit,
                                  bounds=[start_datetime, end_datetime])

            var = sdata[variable]
            metadata = get_metadata(var)

            try:
                units = Unit(metadata.units)
            except ValueError:
                logging.warning(
                    "Unable to parse units '{}' in {} for {}.".format(
                        metadata.units, f, variable))
                units = None

            cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                        dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                        aux_coords_and_dims=[(time_coord, None)],
                        var_name=metadata._name,
                        long_name=metadata.long_name,
                        units=units)

            cube_list.append(cube)

        # Merge the cube list across the scalar time coordinates before returning a single cube.
        return cube_list.merge_cube()
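
The merge step above relies on Iris promoting a scalar time coordinate that differs between otherwise identical cubes into a new leading dimension. A minimal standalone sketch of that behaviour (not part of the CIS reader; names and data are illustrative):

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import AuxCoord

cubes = CubeList()
for day in (0.0, 1.0):
    cube = Cube(np.zeros((2, 3)), long_name='dummy_variable')
    # Scalar time coordinate; its value differs between the two cubes.
    cube.add_aux_coord(AuxCoord(day, standard_name='time', units='days since 2010-01-01'))
    cubes.append(cube)

merged = cubes.merge_cube()
print(merged.shape)  # (2, 2, 3): time has been promoted to the leading dimension
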
Example #2
    def test_that_can_calculate_mid_point_between_two_datetime(self):
        import datetime as dt
        from nose.tools import eq_
        from cis.time_util import calculate_mid_time, convert_datetime_to_std_time
        t1 = convert_datetime_to_std_time(dt.datetime(2010, 2, 5, 0, 0, 0))
        t2 = convert_datetime_to_std_time(dt.datetime(2010, 2, 6, 0, 0, 0))
        tm = calculate_mid_time(t1, t2)
        eq_(tm, convert_datetime_to_std_time(dt.datetime(2010, 2, 5, 12, 0, 0)))
Example #3
    def _create_coord_list(self, filenames):
        import numpy as np
        from cis.time_util import calculate_mid_time, cis_standard_time_unit

        variables = ["XDim", "YDim"]
        logging.info("Listing coordinates: " + str(variables))

        sdata, vdata = hdf.read(filenames, variables)

        lat = sdata["YDim"]
        lat_metadata = hdf.read_metadata(lat, "SD")

        lon = sdata["XDim"]
        lon_metadata = hdf.read_metadata(lon, "SD")

        # expand lat and lon data array so that they have the same shape
        lat_data = utils.expand_1d_to_2d_array(
            hdf.read_data(lat, "SD"), lon_metadata.shape, axis=1
        )  # expand latitude column wise
        lon_data = utils.expand_1d_to_2d_array(
            hdf.read_data(lon, "SD"), lat_metadata.shape, axis=0
        )  # expand longitude row wise

        lat_metadata.shape = lat_data.shape
        lon_metadata.shape = lon_data.shape

        # make sure the standard_name ("latitude"/"longitude") is displayed instead of "YDim" and "XDim"
        lat_metadata.standard_name = "latitude"
        lat_metadata._name = ""
        lon_metadata.standard_name = "longitude"
        lon_metadata._name = ""

        # create arrays for time coordinate using the midpoint of the time delta between the start date and the end date
        time_data_array = []
        for filename in filenames:
            mid_datetime = calculate_mid_time(self._get_start_date(filename), self._get_end_date(filename))
            logging.debug("Using " + str(mid_datetime) + " as datetime for file " + str(filename))
            # Only use part of the full lat shape as it has already been concatenated
            time_data = np.empty((lat_metadata.shape[0] // len(filenames), lat_metadata.shape[1]), dtype="float64")
            time_data.fill(mid_datetime)
            time_data_array.append(time_data)
        time_data = utils.concatenate(time_data_array)
        time_metadata = Metadata(
            name="DateTime",
            standard_name="time",
            shape=time_data.shape,
            units=str(cis_standard_time_unit),
            calendar=cis_standard_time_unit.calendar,
        )

        coords = CoordList()
        coords.append(Coord(lon_data, lon_metadata, "X"))
        coords.append(Coord(lat_data, lat_metadata, "Y"))
        coords.append(Coord(time_data, time_metadata, "T"))

        return coords
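
The "expand ... column wise / row wise" comments above describe turning the 1-d XDim/YDim vectors into full 2-d latitude/longitude grids so that all three coordinates share one shape. A plain-NumPy sketch of that expansion (illustrative only, not the cis.utils implementation):

import numpy as np

lat_1d = np.linspace(-90.0, 90.0, 4)     # one latitude per row
lon_1d = np.linspace(-180.0, 180.0, 5)   # one longitude per column

# Repeat the latitude vector across columns and the longitude vector across rows
lat_2d = np.repeat(lat_1d[:, np.newaxis], lon_1d.size, axis=1)
lon_2d = np.repeat(lon_1d[np.newaxis, :], lat_1d.size, axis=0)

print(lat_2d.shape, lon_2d.shape)  # (4, 5) (4, 5)
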
Example #4
    def _create_cube(self, filenames, variable):
        import numpy as np
        from cis.data_io.hdf import _read_hdf4
        from cis.data_io import hdf_vd
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord, AuxCoord
        from cis.time_util import calculate_mid_time, cis_standard_time_unit
        from cis.data_io.hdf_sd import get_metadata
        from cf_units import Unit

        variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
        logging.info("Listing coordinates: " + str(variables))

        cube_list = CubeList()
        # Read each file individually, let Iris do the merging at the end.
        for f in filenames:
            sdata, vdata = _read_hdf4(f, variables)

            lat_points = np.linspace(-90., 90., hdf_vd.get_data(vdata['YDim:GlobalGrid']))
            lon_points = np.linspace(-180., 180., hdf_vd.get_data(vdata['XDim:GlobalGrid']))

            lat_coord = DimCoord(lat_points, standard_name='latitude', units='degrees')
            lon_coord = DimCoord(lon_points, standard_name='longitude', units='degrees')

            # create time coordinate using the midpoint of the time delta between the start date and the end date
            start_datetime = self._get_start_date(f)
            end_datetime = self._get_end_date(f)
            mid_datetime = calculate_mid_time(start_datetime, end_datetime)
            logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
            time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                                  bounds=[start_datetime, end_datetime])

            var = sdata[variable]
            metadata = get_metadata(var)

            try:
                units = Unit(metadata.units)
            except ValueError:
                logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
                units = None

            cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                        dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                        aux_coords_and_dims=[(time_coord, None)],
                        var_name=metadata._name, long_name=metadata.long_name, units=units)

            cube_list.append(cube)

        # Merge the cube list across the scalar time coordinates before returning a single cube.
        return cube_list.merge_cube()
Example #5
def test_that_can_calculate_mid_point_between_two_datetime():
    import datetime as dt
    from nose.tools import eq_
    from cis.time_util import calculate_mid_time, convert_datetime_to_std_time
    t1 = convert_datetime_to_std_time(dt.datetime(2010, 2, 5, 0, 0, 0))
    t2 = convert_datetime_to_std_time(dt.datetime(2010, 2, 6, 0, 0, 0))
    tm = calculate_mid_time(t1, t2)
    eq_(tm, convert_datetime_to_std_time(dt.datetime(2010, 2, 5, 12, 0, 0)))