Example #1 (0 votes)
File: hdf.py — Project: cedadev/cis
def read_metadata(data_dict, data_type):
    """Return the metadata for the first entry of *data_dict*.

    Dispatches to the SD or VD metadata reader according to *data_type*
    ('SD' or 'VD'); any other value raises ValueError.
    """
    if data_type == 'SD':
        return hdf_sd.get_metadata(data_dict[0])
    if data_type == 'VD':
        return hdf_vd.get_metadata(data_dict[0])
    raise ValueError("Invalid data-type: %s, HDF variables must be VD or SD only" % data_type)
Example #2 (0 votes)
File: MODIS.py — Project: tommibergman/cis
    def _create_cube(self, filenames, variable):
        """Build a single gridded cube for *variable* from *filenames*.

        Each file is read individually and given a scalar time coordinate
        (the midpoint of the file's start/end dates, with those dates as
        bounds); the per-file cubes are then merged over time into one cube.

        Fix: removed the unused ``import numpy as np`` — nothing in this
        body references ``np``.
        """
        from cis.data_io.hdf import _read_hdf4
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord, AuxCoord
        from cis.time_util import calculate_mid_time, cis_standard_time_unit
        from cis.data_io.hdf_sd import get_metadata
        from cf_units import Unit

        variables = ['XDim', 'YDim', variable]
        logging.info("Listing coordinates: " + str(variables))

        cube_list = CubeList()
        # Read each file individually, let Iris do the merging at the end.
        for f in filenames:
            sdata, vdata = _read_hdf4(f, variables)

            # Latitude/longitude come straight from the per-file SDS axes.
            lat_coord = DimCoord(_get_MODIS_SDS_data(sdata['YDim']),
                                 standard_name='latitude',
                                 units='degrees')
            lon_coord = DimCoord(_get_MODIS_SDS_data(sdata['XDim']),
                                 standard_name='longitude',
                                 units='degrees')

            # create time coordinate using the midpoint of the time delta between the start date and the end date
            start_datetime = self._get_start_date(f)
            end_datetime = self._get_end_date(f)
            mid_datetime = calculate_mid_time(start_datetime, end_datetime)
            logging.debug("Using {} as datetime for file {}".format(
                mid_datetime, f))
            time_coord = AuxCoord(mid_datetime,
                                  standard_name='time',
                                  units=cis_standard_time_unit,
                                  bounds=[start_datetime, end_datetime])

            var = sdata[variable]
            metadata = get_metadata(var)

            # Units strings in MODIS files are not always CF-parsable;
            # fall back to unit-less rather than failing the whole read.
            try:
                units = Unit(metadata.units)
            except ValueError:
                logging.warning(
                    "Unable to parse units '{}' in {} for {}.".format(
                        metadata.units, f, variable))
                units = None

            cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                        dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                        aux_coords_and_dims=[(time_coord, None)],
                        var_name=metadata._name,
                        long_name=metadata.long_name,
                        units=units)

            cube_list.append(cube)

        # Merge the cube list across the scalar time coordinates before returning a single cube.
        return cube_list.merge_cube()
Example #3 (0 votes)
def read_metadata(data_dict, data_type):
    """Extract metadata from the first element of *data_dict*.

    *data_type* selects the reader: 'VD' or 'SD'. Any other value
    raises ValueError.
    """
    if data_type not in ('VD', 'SD'):
        raise ValueError(
            "Invalid data-type: %s, HDF variables must be VD or SD only" %
            data_type)
    reader = hdf_vd.get_metadata if data_type == 'VD' else hdf_sd.get_metadata
    return reader(data_dict[0])
Example #4 (0 votes)
def hdf_read(filenames, variable, start=None, count=None, stride=None):
    """Returns variable, concatenated over a sequence of files.

    The optional start/count/stride arguments are passed through to the
    per-file SDS reader; metadata is taken from the first file's variable.
    """
    from cis.data_io.hdf import read
    from cis.data_io.hdf_sd import get_metadata
    from cis.utils import concatenate

    sdata, _ = read(filenames, variable)
    var = sdata[variable]

    # Collect the (possibly sub-sampled) data from each file, then join.
    chunks = []
    for item in var:
        chunks.append(_get_MODIS_SDS_data(item, start, count, stride))

    return concatenate(chunks), get_metadata(var[0])
Example #5 (0 votes)
def ncdf_read(filenames, variable, start=None, count=None, stride=None):
    """Returns variable, concatenated over a sequence of files.

    The optional start/count/stride arguments are passed to the per-file
    tidy-up step. Metadata is read from the variable of the last file.

    Fix: with an empty *filenames* sequence the original left ``var``
    unbound and crashed with UnboundLocalError at the ``get_metadata``
    call; raise an explicit ValueError instead.
    """
    from cis.data_io.netcdf import read, get_metadata
    from cis.utils import concatenate, listify

    files = listify(filenames)
    if not files:
        raise ValueError("No filenames given for variable %s" % variable)

    data = []
    for f in files:
        sdata = read(f, variable)
        var = sdata[variable]
        data.append(_tidy_ncdf_data(var, start, count, stride))

    # NOTE(review): metadata comes from the *last* file's variable, as in
    # the original — presumably all files share the same metadata; verify.
    metadata = get_metadata(var)

    return concatenate(data), metadata
Example #6 (0 votes)
    def _create_cube(self, filenames, variable):
        """Read *variable* from every file and merge into a single cube.

        Each file contributes one cube carrying a scalar time coordinate
        (midpoint of the file's start/end dates); Iris merges the list
        over time at the end.
        """
        import numpy as np
        from cis.data_io.hdf import _read_hdf4
        from cis.data_io import hdf_vd
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord, AuxCoord
        from cis.time_util import calculate_mid_time, cis_standard_time_unit
        from cis.data_io.hdf_sd import get_metadata
        from cf_units import Unit

        variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
        logging.info("Listing coordinates: " + str(variables))

        cubes = CubeList()
        # One cube per input file; Iris does the merging at the end.
        for fname in filenames:
            sdata, vdata = _read_hdf4(fname, variables)

            # The grid axes are reconstructed from the grid dimensions
            # stored in the file's VD data, spanning the full globe.
            n_lat = hdf_vd.get_data(vdata['YDim:GlobalGrid'])
            n_lon = hdf_vd.get_data(vdata['XDim:GlobalGrid'])
            lat = DimCoord(np.linspace(-90., 90., n_lat),
                           standard_name='latitude', units='degrees')
            lon = DimCoord(np.linspace(-180., 180., n_lon),
                           standard_name='longitude', units='degrees')

            # Scalar time coordinate: midpoint of the file's date range.
            t_start = self._get_start_date(fname)
            t_end = self._get_end_date(fname)
            t_mid = calculate_mid_time(t_start, t_end)
            logging.debug("Using {} as datetime for file {}".format(t_mid, fname))
            time_coord = AuxCoord(t_mid, standard_name='time',
                                  units=cis_standard_time_unit,
                                  bounds=[t_start, t_end])

            metadata = get_metadata(sdata[variable])

            # Fall back to unit-less when the units string is unparsable.
            try:
                units = Unit(metadata.units)
            except ValueError:
                logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, fname, variable))
                units = None

            cubes.append(Cube(_get_MODIS_SDS_data(sdata[variable]),
                              dim_coords_and_dims=[(lon, 1), (lat, 0)],
                              aux_coords_and_dims=[(time_coord, None)],
                              var_name=metadata._name,
                              long_name=metadata.long_name,
                              units=units))

        # Merge across the scalar time coordinates into one cube.
        return cubes.merge_cube()
Example #7 (0 votes)
def test_that_can_get_metadata_for_known_variable():
    """Metadata read from the 'Latitude' SDS matches its known attributes."""
    data_dict = hdf_sd.read(escape_colons(valid_hdf_sd_file))
    metadata = hdf_sd.get_metadata(data_dict['Latitude'])

    # Scalar metadata fields, checked as (actual, expected) pairs.
    for actual, expected in [(metadata._name, "Latitude"),
                             (metadata.standard_name, "latitude"),
                             (metadata.long_name, "Geodetic Latitude"),
                             (metadata.shape, [203, 135]),
                             (metadata.units, "Degrees_north"),
                             (metadata.factor, 1.0),
                             (metadata.offset, 0.0),
                             (metadata.missing_value, -999.0)]:
        eq_(actual, expected)

    # Miscellaneous HDF attributes.
    attr = metadata.misc
    eq_(len(attr), 5)
    eq_(attr['Parameter_Type'], "MODIS Input")
    eq_(attr['valid_range'], [-90.0, 90.0])
Example #8 (0 votes)
File: test_hdf_sd.py — Project: cpaulik/cis
def test_that_can_get_metadata_for_known_variable():
    data_dict = hdf_sd.read(valid_hdf_sd_file)
    metadata = hdf_sd.get_metadata(data_dict['Latitude'])

    eq_(metadata._name, "Latitude")
    eq_(metadata.standard_name, "latitude")
    eq_(metadata.long_name, "Geodetic Latitude")
    eq_(metadata.shape, [203, 135])
    eq_(metadata.units, "Degrees_north")
    eq_(metadata.range, [-90.0, 90.0])
    eq_(metadata.factor, 1.0)
    eq_(metadata.offset, 0.0)
    eq_(metadata.missing_value, -999.0)

    attr = metadata.misc
    eq_(len(attr), 10)
    eq_(attr['_FillValue'], -999.0)
    eq_(attr['Parameter_Type'], "MODIS Input")