Example #1
0
def read_var_jslice(filename, var_name, j, thetac, phic):
    """Read a constant-j (theta index) slice of *var_name* from an HDF4 file.

    For the derived names br/btheta/bphi and vr/vtheta/vphi the Cartesian
    components stored in the file are rotated into spherical components.
    """
    hdffile = SD(filename, SDC.READ)
    ni = hdffile.select('X_grid').ni - 1
    nk = hdffile.select('X_grid').nk - 1
    phic = phic[:, None]
    thetac = thetac[j]

    def slab(name):
        # Pull the (nk, 1, ni) slab at theta index j, dropping the singleton axis.
        return hdffile.select(name).get(start=(0, j, 0),
                                        count=(nk, 1, ni)).squeeze()

    derived = ('br', 'btheta', 'bphi', 'vr', 'vtheta', 'vphi')
    if var_name not in derived:
        # Grid coordinates are stored without the trailing underscore
        # that every other plain dataset name carries.
        if var_name in ('X_grid', 'Y_grid', 'Z_grid'):
            var = slab(var_name)
        else:
            var = slab(var_name + '_')
    else:
        prefix = 'b' if var_name in ('br', 'btheta', 'bphi') else 'v'
        cx = slab(prefix + 'x_')
        cy = slab(prefix + 'y_')
        cz = slab(prefix + 'z_')

        # Cartesian -> spherical rotation of the vector components.
        if var_name.endswith('r'):
            var = (cx * cos(phic) * sin(thetac)
                   + cy * sin(phic) * sin(thetac)
                   + cz * cos(thetac))
        elif var_name.endswith('theta'):
            var = (cx * cos(phic) * cos(thetac)
                   + cy * sin(phic) * cos(thetac)
                   - cz * sin(thetac))
        else:
            var = -cx * sin(phic) + cy * cos(phic)
    hdffile.end()
    return var
Example #2
0
def _create_seadas_chlor_a_file(full_path, mission, sensor):
    """Create a minimal SEADAS-style chlor_a HDF4 file.

    Args:
        full_path: Destination HDF4 filename.
        mission: Value for the global ``Mission`` attribute.
        sensor: Value for the global ``Sensor Name`` attribute.

    Returns:
        A single-element list containing ``full_path``.
    """
    h = SD(full_path, SDC.WRITE | SDC.CREATE)
    # Attribute names contain spaces, so setattr() is required.
    setattr(h, "Sensor Name", sensor)
    h.Mission = mission
    setattr(h, "Start Time", "2021322175853191")
    setattr(h, "End Time", "2021322180551214")

    def _nav_info(long_name, standard_name, units, valid_range):
        # Shared layout for the 5x5 float32 navigation datasets.
        return {
            "type": SDC.FLOAT32,
            "data": np.zeros((5, 5), dtype=np.float32),
            "dim_labels":
            ["Number of Scan Lines", "Number of Pixel Control Points"],
            "attrs": {
                "long_name": long_name,
                "standard_name": standard_name,
                "units": units,
                "valid_range": valid_range,
            }
        }

    _add_variable_to_file(
        h, "longitude",
        _nav_info("Longitude\x00", "longitude\x00", "degrees_east\x00",
                  (-180.0, 180.0)))
    _add_variable_to_file(
        h, "latitude",
        _nav_info("Latitude\x00", "latitude\x00", "degrees_north\x00",
                  (-90.0, 90.0)))

    chlor_a_info = {
        "type": SDC.FLOAT32,
        "data": np.zeros((5, 5), np.float32),
        "dim_labels":
        ["Number of Scan Lines", "Number of Pixel Control Points"],
        "attrs": {
            "long_name": "Chlorophyll Concentration, OCI Algorithm\x00",
            "units": "mg m^-3\x00",
            "standard_name":
            "mass_concentration_of_chlorophyll_in_sea_water\x00",
            "valid_range": (0.001, 100.0),
        }
    }
    _add_variable_to_file(h, "chlor_a", chlor_a_info)
    # Close the SD interface so datasets/attributes are flushed to disk;
    # the original never ended the handle.
    h.end()
    return [full_path]
Example #3
0
def create_test_data():
    """Create a fake MODIS 35 L2 HDF4 file with headers."""
    from datetime import datetime, timedelta

    base_dir, file_name = generate_file_name()
    h = SD(file_name, SDC.WRITE | SDC.CREATE)

    # Global EOS metadata attributes: a 5-minute time range starting now.
    beginning_date = datetime.now()
    ending_date = beginning_date + timedelta(minutes=5)
    core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \
                           "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
                           "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n"\
                           "NUM_VAL = 1\nVALUE = \"{}\"\n"\
                           "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n"\
                           "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
                           "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME".format(
                               beginning_date.strftime("%Y-%m-%d"),
                               beginning_date.strftime("%H:%M:%S.%f"),
                               ending_date.strftime("%Y-%m-%d"),
                               ending_date.strftime("%H:%M:%S.%f")
                           )
    struct_metadata_header = "GROUP=SwathStructure\n"\
                             "GROUP=SWATH_1\n"\
                             "GROUP=DimensionMap\n"\
                             "OBJECT=DimensionMap_2\n"\
                             "GeoDimension=\"Cell_Along_Swath_5km\"\n"\
                             "END_OBJECT=DimensionMap_2\n"\
                             "END_GROUP=DimensionMap\n"\
                             "END_GROUP=SWATH_1\n"\
                             "END_GROUP=SwathStructure\nEND"
    archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND"
    # Attribute names contain a '.', so setattr() is required.
    setattr(h, 'CoreMetadata.0', core_metadata_header)  # noqa
    setattr(h, 'StructMetadata.0', struct_metadata_header)  # noqa
    setattr(h, 'ArchiveMetadata.0', archive_metadata_header)  # noqa

    # Fill datasets from the module-level TEST_DATA description.
    for dataset, info in TEST_DATA.items():
        v = h.create(dataset, info['type'], info['data'].shape)
        v[:] = info['data']
        for dim_index, dimension_name in enumerate(info['attrs']['dim_labels']):
            v.dim(dim_index).setname(dimension_name)
        v.setfillvalue(info['fill_value'])
        v.scale_factor = info['attrs'].get('scale_factor', SCALE_FACTOR)
    h.end()
    return base_dir, file_name
Example #4
0
def run(FILE_NAME):
    """Plot scan 0 of the 'precipitation' field on a global cylindrical map."""
    DATAFIELD_NAME = 'precipitation'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        # Ignore the leading singleton dimension.
        nc = Dataset(FILE_NAME)
        grid = nc.variables[DATAFIELD_NAME][0, :, :].astype(np.float64)
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        grid = hdf.select(DATAFIELD_NAME)[0, :, :].astype(np.float64)

    # Treat 0.0 as the fill value; mask via NaN since the array is float.
    grid[grid == 0.0] = np.nan
    masked = np.ma.masked_where(np.isnan(grid), grid)

    # The lat/lon grid is fixed by the product definition, not the file:
    # http://disc.sci.gsfc.nasa.gov/additional/faq/precipitation_faq.shtml#lat_lon
    latitude = np.arange(-49.875, 49.875, 0.249375)
    longitude = np.arange(-179.875, 179.876, 0.25)

    # Equidistant cylindrical projection, low resolution coastlines.
    bmap = Basemap(projection='cyl', resolution='l',
                   llcrnrlat=-90, urcrnrlat=90,
                   llcrnrlon=-180, urcrnrlon=180)
    bmap.drawcoastlines(linewidth=0.5)
    bmap.drawparallels(np.arange(-90, 120, 30), labels=[1, 0, 0, 0])
    bmap.drawmeridians(np.arange(-180, 180, 45), labels=[0, 0, 0, 1])
    bmap.pcolormesh(longitude, latitude, masked.T, latlon=True)
    bmap.colorbar().set_label('Unit:mm/hr')

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1} at scan=0'.format(basename, DATAFIELD_NAME))

    pngfile = "{0}.py.png".format(basename)
    plt.gcf().savefig(pngfile)
Example #5
0
    def __init__(self, filename):
        """Open *filename* as an HDF4 SD file and preload common datasets."""
        self.filename = filename
        self.file = SD(self.filename, SDC.READ)

        # Diagnostic: show what datasets the file actually contains.
        datasets_dic = self.file.datasets()
        print(datasets_dic.keys())

        # Preload the datasets this reader exposes as attributes.
        self.SWC = self.__getitem__("snow_water_content")
        self.R = self.__getitem__("snowfall_rate")
        self.logN0 = self.__getitem__("log_N0")
        self.loglambda = self.__getitem__("log_lambda")
        self.height = self.__getitem__("Height")
def DesHDF(File, name_var, forma=''):
    """Extract a variable from an HDF4 file.

    The variable may be ascending ('_A') or descending ('_D') via *forma*.
    (Docstring translated from Spanish.)

    Args:
        File: Path to the HDF4 file.
        name_var: Base name of the dataset to read.
        forma: Optional suffix appended to the dataset name ('_A'/'_D').

    Returns:
        The dataset contents as returned by ``SDS.get()``.
    """
    hdfFile = SD(File, mode=1)
    d1 = hdfFile.select(name_var + forma)
    var = d1.get()
    # Bug fix: the bare Python 2 print statement is a syntax error under the
    # Python 3 syntax used elsewhere in this file; this call prints the same
    # text on both versions.
    print('Extracting ', name_var)
    d1.endaccess()
    hdfFile.end()

    return var
Example #7
0
    def get_variable_names(self, filenames, data_type=None):
        """Return names of variables on the mod08 grid across *filenames*."""
        try:
            from pyhdf.SD import SD
        except ImportError:
            raise ImportError("HDF support was not installed, please reinstall with pyhdf to read HDF files.")

        variables = set()
        for filename in filenames:
            datasets = SD(filename).datasets()
            # Keep only variables with the expected mod08 grid dimensions.
            variables.update(
                name for name, info in datasets.items()
                if info[0] == ('YDim:mod08', 'XDim:mod08'))

        return variables
Example #8
0
 def valid_MOD_file(f):
     """Checks a MODIS file can be used.

     Returns True when the file opens as HDF4 and a one-pixel Longitude
     read succeeds, False otherwise.
     """
     from pyhdf.SD import SD
     from pyhdf.error import HDF4Error
     try:
         file_object = SD(f)
     except HDF4Error:
         return False
     # Bug fix: the original leaked file_object when the probe read raised;
     # close it on every path.
     try:
         _get_hdf_data(file_object,
                       "Longitude",
                       start=(0, 0),
                       count=(1, 1),
                       stride=(1, 1))
         return True
     except HDF4Error:
         return False
     finally:
         file_object.end()
Example #9
0
def get_attr_from_hdf(hdf, dataset):
    """Get the attributes from the hdf4 file of the given dataset.

    Args:
        hdf (str): Filename of the the NCSA HDF4 file.
        dataset (str): The given dataset in the hdf file.

    Returns:
        dict: The attributes of the given dataset.

    """
    from pyhdf.SD import SD, SDC
    file_handle = SD(hdf, SDC.READ)
    # Bug fix: read the attributes first, then close both the dataset and
    # the file — the original leaked the SD handle.
    try:
        sds_obj = file_handle.select(dataset)
        attrs = sds_obj.attributes()
        sds_obj.endaccess()
        return attrs
    finally:
        file_handle.end()
Example #10
0
    def _populate_SD(self):
        """Populate SDs and their shape attributes."""
        try:
            h4 = SD(self.filename, mode=SDC.READ)
            # Iterating the datasets dict yields its keys (Py2/Py3 safe).
            self.sds = sorted(h4.datasets())
            self.attr.append(h4.attributes())

            # Record (name, shape) for each dataset in sorted order.
            for name, info in sorted(h4.datasets().items()):
                self.items.append((name, info[1]))
            h4.end()
        except HDF4Error as e:
            # Re-raise with the filename for context.
            raise HDF4Error('{}: {}'.format(e, self.filename))
def _read_file(filename):

    # TODO: Add logging for filename and exception measured.

    try:

        file = SD(filename, SDC.READ)

    except Exception:

        file = None

        print(filename)

    return file
Example #12
0
 def load_wv_nir(self):
     """Lazily load the MOD05 near-infrared water vapor product.

     Populates data_wv_nir (offset/scale applied), nAlong, nAcross and
     cover on first call; subsequent calls return the cached data.

     Returns:
         self, to allow chaining.
     """
     if self.data_wv_nir is None:
         # Bug fix: only open the HDF file when the data is not cached —
         # the original opened it unconditionally and leaked the handle
         # on the cached path.
         hdf = SD(self.data_sourcedir + self.data, SDC.READ)
         ds_wv_nir = hdf.select('Water_Vapor_Near_Infrared')
         key_across = 'Cell_Across_Swath_1km:mod05'
         key_along = 'Cell_Along_Swath_1km:mod05'
         self.nAlong = ds_wv_nir.dimensions()[key_along]
         self.nAcross = ds_wv_nir.dimensions()[key_across]
         # Apply the HDF offset/scale to get physical values.
         add_offset = ds_wv_nir.attributes()['add_offset']
         scale_factor = ds_wv_nir.attributes()['scale_factor']
         self.data_wv_nir = (ds_wv_nir.get() - add_offset) * scale_factor
         ds_wv_nir.endaccess()
         self.cover = gd.modis_cover_from_gring(hdf)
         hdf.end()
     return self
Example #13
0
    def __init__(self, filename, filename_info, filetype_info):
        """Open the HDF4 file and parse its EOS metadata attributes.

        Raises:
            ValueError: if the file cannot be opened as HDF4.
        """
        BaseFileHandler.__init__(self, filename, filename_info, filetype_info)
        try:
            self.sd = SD(self.filename)
        except HDF4Error as err:
            error_message = "Could not load data from file {}: {}".format(
                self.filename, err)
            # Chain the original HDF4Error so the root cause is not lost.
            raise ValueError(error_message) from err

        # Read metadata
        self.metadata = self.read_mda(self.sd.attributes()['CoreMetadata.0'])
        self.metadata.update(
            self.read_mda(self.sd.attributes()['StructMetadata.0']))
        self.metadata.update(
            self.read_mda(self.sd.attributes()['ArchiveMetadata.0']))
Example #14
0
def run(FILE_NAME):
    """Plot the TRMM 3A46 'ssmiData' field on a global cylindrical map."""
    DATAFIELD_NAME = 'ssmiData'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        grid = nc.variables[DATAFIELD_NAME][0, 0, :, :].astype(np.float64)
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        grid = hdf.select(DATAFIELD_NAME)[0, 0, :, :].astype(np.float64)

    # Treat whatever value sits in the first cell as the fill value and
    # mask it via NaN.
    grid[grid == grid[0, 0]] = np.nan
    masked = np.ma.masked_where(np.isnan(grid), grid)

    # The lat/lon grid is fixed by the product definition, not the file:
    # http://disc.sci.gsfc.nasa.gov/precipitation/documentation/TRMM_README/TRMM_3A46_readme.shtml
    latitude = np.arange(89.5, -89.51, -1)
    longitude = np.arange(0.5, 359.51, 1)

    # Equidistant cylindrical projection, low resolution coastlines.
    bmap = Basemap(projection='cyl', resolution='l',
                   llcrnrlat=-90, urcrnrlat=90,
                   llcrnrlon=0, urcrnrlon=360)
    bmap.drawcoastlines(linewidth=0.5)
    bmap.drawparallels(np.arange(-90, 120, 30), labels=[1, 0, 0, 0])
    bmap.drawmeridians(np.arange(0, 360, 45), labels=[0, 0, 0, 1])
    bmap.pcolormesh(longitude, latitude, masked, latlon=True)
    bmap.colorbar().set_label('Unit:mm/hr')

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))

    pngfile = "{0}.py.png".format(basename)
    plt.gcf().savefig(pngfile)
Example #15
0
def read_calipso_hdf4(filename, retv):
    """Read CALIPSO datasets from an HDF4 file into *retv*, one attribute per
    dataset.

    Large/surface variables are skipped; selected single-shot ('ss')
    variables are collected separately and folded into *retv* via
    rearrange_calipso_the_single_shot_info.

    Args:
        filename: Path to the CALIPSO HDF4 file; None is a no-op.
        retv: Object that receives one attribute per dataset read.

    Returns:
        retv, possibly augmented with single-shot summary fields.
    """
    from pyhdf.SD import SD, SDC

    # from pyhdf.HDF import HDF, HC
    # import pyhdf.VS

    def convert_data(data):
        # Collapse (N, 1) or (1, N) arrays to 1-D.  NOTE(review): defined
        # but never called below — confirm whether callers rely on it.
        if len(data.shape) == 2:
            if data.shape[1] == 1:
                return data[:, 0]
            elif data.shape[0] == 1:
                return data[0, :]
        return data

    if filename is not None:
        h4file = SD(filename, SDC.READ)
        datasets = h4file.datasets()
        # attributes = h4file.attributes()
        singleshotdata = {}
        for idx, dataset in enumerate(datasets.keys()):
            # Skip variables known to be large and not needed yet.
            if dataset in scip_these_larger_variables_until_needed.keys():
                logger.debug("Not reading " + dataset)
                continue
            elif dataset[0:8] == 'Surface_':
                logger.debug("Not reading " + dataset)
                continue
            # Stash the single-shot datasets needed for post-processing.
            if dataset in [
                    "ssNumber_Layers_Found", "ssLayer_Base_Altitude",
                    "ssLayer_Top_Pressure", "ssLayer_Top_Altitude"
            ]:
                singleshotdata[dataset] = h4file.select(dataset).get()
            if dataset[0:2] == "ss":
                # already saved temporarily what we need above
                continue
            # Map the dataset to its attribute name (lowercased, or the
            # atrain_match alias when one exists).
            name = dataset.lower()
            # print idx, dataset
            if dataset in atrain_match_names.keys():
                name = atrain_match_names[dataset]
            data = np.array(h4file.select(dataset).get())
            setattr(retv, name, data)
        if "ssNumber_Layers_Found" in singleshotdata.keys():
            # Extract number of cloudy single shots (max 15)
            # plus average cloud base and top
            # in 5 km FOV
            logger.info("Reading single shot information")
            retv = rearrange_calipso_the_single_shot_info(retv, singleshotdata)
    return retv
Example #16
0
def get_area_def(hdf_file):
    """Compute an optimal bounding-box area from a MYD03 file's geolocation."""
    generic_m3 = str(hdf_file)
    metadata = parseMeta(generic_m3)

    # Read the lats and lons from the MYD03 file.
    print(f'reading {generic_m3}')
    m3_file = SD(str(generic_m3), SDC.READ)
    lats = m3_file.select('Latitude').get()
    lons = m3_file.select('Longitude').get()
    m3_file.end()

    # Build the swath and derive the best-fitting projected area.
    proj_params = get_proj_params(generic_m3)
    swath_def = SwathDefinition(lons, lats)
    area_def = swath_def.compute_optimal_bb_area(proj_dict=proj_params)

    return (area_def, lons, lats, metadata, swath_def)
Example #17
0
    def getLaserEnergy(filename):
        '''Read the 532 nm laser energy per profile from a VFM HDF4 file.

        (Translated from Chinese.)

        :param filename: file name
        :return: beams with energy below 80 mJ are too weak; None on error
        '''
        try:
            dt = SD(filename)
            # SDS reading (single-shot laser energy dataset).
            try:
                laser_energy = dt.select('ssLaser_Energy_532').get()
            finally:
                # Bug fix: the original never closed the SD handle.
                dt.end()
            return laser_energy[:, 0]
        except HDF4Error:
            print(f'VFM READ ERROR:(unknown)')
            print("HDF4Error:", sys.exc_info()[0])
            return None
Example #18
0
    def getLandWaterMask(filename):
        '''Read the single-shot land/water mask from a VFM HDF4 file.

        :param filename: file name
        :return: the mask's first column, or None on read error
        '''
        try:
            dt = SD(filename)
            # SDS reading (single-shot land/water mask dataset).
            try:
                land_water_mask = dt.select('ssLand_Water_Mask').get()
            finally:
                # Bug fix: the original never closed the SD handle.
                dt.end()
            return land_water_mask[:, 0]
        except HDF4Error:
            print(f'VFM READ ERROR:(unknown)')
            print("HDF4Error:", sys.exc_info()[0])
            return None
Example #19
0
def HDFsd_read(filename, sdname):
    """Read dataset *sdname* from an HDF4 file.

    Returns:
        (values, attributes) — both None when the dataset is missing,
        with a listing of available datasets printed for diagnosis.
    """
    the_file = SD(str(filename), SDC.READ)
    values, attributes = None, None
    try:
        sds = the_file.select(sdname)
        values = sds.get()
        attributes = sds.attributes()
    except HDF4Error as e:
        # Show what the file actually contains to help fix the name.
        datasets_dict = the_file.datasets()
        print(f"couldn't find {sdname} in "
              f"\n{pprint.pformat(datasets_dict)}")
        print(e)
    the_file.end()

    return values, attributes
Example #20
0
 def get_hdf4_data(cls, hdf4_file, data_name, slope=None, intercept=None, valid_range=None):
     """Read *data_name* from an HDF4 file, applying slope/intercept scaling.

     Any of slope/intercept/valid_range left as None is taken from the
     dataset's own attributes.  Values outside valid_range become -999.

     Returns:
         The scaled float64 array, or None if the dataset is missing.
     """
     hdf = SD(hdf4_file, SDC.READ)
     dataset = hdf.select(data_name)
     if dataset is not None:
         attrs = dataset.attributes()
         if slope is None:
             slope = attrs['scale_factor']
         if intercept is None:
             intercept = attrs['add_offset']
         if valid_range is None:
             valid_range = attrs['valid_range']
         # Bug fix: np.float was removed in NumPy 1.24; float64 is the
         # type the old alias resolved to.
         data = dataset.get().astype(np.float64)
         # Flag out-of-range cells BEFORE scaling, then overwrite them.
         invalid_index = np.logical_or(data < valid_range[0], data > valid_range[1])
         data = data * slope + intercept
         data[invalid_index] = -999
         # Close the file handle (the original leaked it).
         hdf.end()
         return data
Example #21
0
def get_vfm_coor(vfm_filename):
    """Read the single-shot longitude/latitude arrays from a VFM HDF4 file.

    (Comments translated from Chinese.)

    Args:
        vfm_filename: path to the VFM data file.

    Returns:
        (longitude, latitude) arrays, or (None, None) on read failure.
    """
    try:
        dt = SD(vfm_filename)
        # SDS reading (latitude/longitude datasets).
        latSds = dt.select('ssLatitude')
        lonSds = dt.select('ssLongitude')
        latitude = latSds.get()
        longitude = lonSds.get()
        # Bug fix: close the SD handle; the original leaked it.
        dt.end()
        return longitude, latitude
    except HDF4Error:
        print('VFM READ ERROR:')
        print(vfm_filename)
        print("Unexpected error:", sys.exc_info()[0])
        return None, None
Example #22
0
 def open(self, files:list) -> dict:
     """Read each band's datasets from *files*, applying Scale/Offset.

     Args:
         files: paths whose .name is matched against each band name.

     Returns:
         dict mapping band name -> list of float arrays (NaN where the
         product's fill/sentinel values occurred).
     """
     data_dict = {b: [] for b in self.bands}
     for band in self.bands:
         # Bug fix (readability): the original reused 'f' for both the
         # comprehension variable and the sorted list, shadowing it.
         matches = sorted([f for f in files if band in f.name])
         if len(matches) == 0:
             warn(f'No file for {band} found on {files}')
         for path in matches:
             hdf_data = SD(str(path), SDC.READ)
             hdf_file = hdf_data.select(band)
             scale = hdf_attr_check('Scale', hdf_file.attributes(), default=1)
             offset = hdf_attr_check('Offset', hdf_file.attributes(), default=0)
             data = hdf_file[:].astype(float)*scale + offset
             # Product fill/sentinel values become NaN.
             data[data <= -999] = np.nan
             data[data >= 65527] = np.nan
             data_dict[band].append(data)
             # Bug fix: close each file handle (the original leaked them).
             hdf_data.end()
     return data_dict
Example #23
0
    def __init__(self, filename):
        """
        Open an HDF4 file for reading.

        Opens both the low-level HDF/VS (vdata) interface and the SD
        (scientific dataset) interface on the same file.

        Arguments:

            filename(str): The path to the file to open.
        """
        super().__init__()
        from pyhdf.HDF import HDF, HC
        from pyhdf.SD import SD, SDC
        import pyhdf.VS
        self.filename = filename
        # HDF + vstart() exposes vdata tables; SD exposes gridded datasets.
        self.hdf = HDF(self.filename, HC.READ)
        self.vs = self.hdf.vstart()
        self.sd = SD(self.filename, SDC.READ)
Example #24
0
    def __init__(self, file_):
        """Build a linear grid interpolator over the file's 'Time   1' dataset.

        Also records per-dimension min/max bounds in dim_min/dim_max.
        """
        fh = SD(file_, SDC.READ)

        dims = get_dims(file_)
        # Bug fix: dict.itervalues() is Python 2 only and raises
        # AttributeError on the Python 3 used elsewhere in this file;
        # .values() behaves the same here on both versions.
        self.dim_min = np.array([np.min(dim) for dim in dims.values()])
        self.dim_max = np.array([np.max(dim) for dim in dims.values()])

        values = fh.select('Time   1')[:].astype(float)

        self.interp = RegularGridInterpolator(dims.values(),
                                              values,
                                              method='linear',
                                              bounds_error=True,
                                              fill_value=None)

        fh.end()
Example #25
0
 def set_file_attr(self):
     """
     Read the file's global attributes into self.file_attr (HDF4/SD read).
     :return: None
     :raises ValueError: for unsupported satellites or resolutions.
     """
     if self.resolution == 13500:
         satellite_type = ['AQUA', 'TERRA']
         if self.satellite in satellite_type:
             h4r = SD(self.in_file, SDC.READ)
             self.file_attr = attrs2dict(h4r.attributes())
         else:
             raise ValueError('Cant read this satellite`s data.: {}'.format(
                 self.satellite))
     else:
         # Bug fix: the original format string had no {} placeholder, so
         # the offending resolution was never included in the message.
         raise ValueError("Cant handle this resolution: {}".format(
             self.resolution))
Example #26
0
def parse_hdf(filename):
    """Open an HDF4 file under sys.path[0]/data, cache every dataset's array
    in the module-level availabe_dict, and print file/dataset info.

    (Python 2 code: uses print statements.)
    """
    # NOTE(review): the format string has no {filename} placeholder even
    # though filename= is passed — the literal path segment looks scrubbed;
    # presumably it should interpolate *filename*.  Confirm before use.
    hdf_file = '{path}/data/(unknown)'.format(
        path=sys.path[0],
        filename=filename
    )
    file = SD(hdf_file)
    print log_string, 'file info: ', file.info()
    datasets_dict = file.datasets()
    # 'Datasets:' (the printed label below is Chinese)
    print log_string, '数据集:'
    for idx, sds in enumerate(datasets_dict):
        sds_obj = file.select(sds)
        data = sds_obj.get()
        # data_attr is read but unused beyond this point.
        data_attr = sds_obj.attributes()
        availabe_dict[sds] = data
        print log_string, idx, sds, ' :', data.shape
    file.end()
Example #27
0
    def read_BRDF(self):
        """Reads MCD43B1 one Tile file with Level 3 BRDF kernels for each MODIS band.

        Fills self.<SDS name> (and any ALIAS name) with an (nobs, 3) kernel
        array sampled at the (dx, dy) tile indices.  (Python 2 code: uses
        print statements.)
        """

        # Create empty lists for SDS to be read from file
        # -----------------------------------------------
        for name in self.SDS:
            self.__dict__[name] = []

        # One (nobs, 3) slab per SDS, pre-filled with MISSING.
        BRDF = MISSING * ones((len(self.SDS), self.nobs, 3))

        for fn, I in self.unique_fn:
            index = I[0]
            if self.verb:
                print index, type(index), len(index)
            # Don't fuss if the file cannot be opened
            # ---------------------------------------
            try:
                if self.verb:
                    print "[] Working on " + fn
                hfile = SD(fn)
            except HDF4Error:
                if self.verb > 2:
                    # NOTE(review): 'filename' is not defined in this scope;
                    # presumably this should print 'fn' — confirm.
                    print "- %s: not recognized as an HDF file" % filename
                return

            # Read select variables (reshape to allow concatenation later)
            # ------------------------------------------------------------
            for sds in self.SDS:
                if self.verb:
                    print 'sds', self.SDS.index(sds)
                v = hfile.select(sds).get()
                a = hfile.select(sds).attributes()
                # Apply the HDF scale/offset only when non-trivial.
                if a['scale_factor'] != 1.0 or a['add_offset'] != 0.0:
                    v = a['scale_factor'] * v + a['add_offset']
                if self.verb:
                    print array(
                        self.dx)[index], BRDF.shape, BRDF[self.SDS.index(sds),
                                                          index], v.shape

                # Sample the tile at the observation pixel indices.
                BRDF[self.SDS.index(sds),
                     index, :] = v[array(self.dx)[index],
                                   array(self.dy)[index], :]

        for sds in self.SDS:
            self.__dict__[sds] = BRDF[self.SDS.index(sds), :, :]
            if sds in ALIAS.keys():
                self.__dict__[ALIAS[sds]] = self.__dict__[sds]
def run(FILE_NAME):
    """Plot 'nearSurfZ' with its swath geolocation on a regional map."""
    DATAFIELD_NAME = 'nearSurfZ'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        grid = nc.variables[DATAFIELD_NAME][:].astype(np.float64)
        # Retrieve the geolocation data.
        latitude = nc.variables['geolocation'][:, :, 0]
        longitude = nc.variables['geolocation'][:, :, 1]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        grid = hdf.select(DATAFIELD_NAME)[:, :].astype(np.double)
        # Retrieve the geolocation data.
        geo = hdf.select('geolocation')
        latitude = geo[:, :, 0]
        longitude = geo[:, :, 1]

    # There's no fill value set, but 0.0 is considered the fill value.
    grid[grid == 0.0] = np.nan
    masked = np.ma.masked_array(grid, np.isnan(grid))

    # Equidistant cylindrical projection, high resolution coastlines.
    bmap = Basemap(projection='cyl', resolution='h',
                   llcrnrlat=30, urcrnrlat=36,
                   llcrnrlon=123, urcrnrlon=135)
    bmap.drawcoastlines(linewidth=0.5)
    bmap.drawparallels(np.arange(30, 37), labels=[1, 0, 0, 0])
    bmap.drawmeridians(np.arange(123, 135, 2), labels=[0, 0, 0, 1])
    bmap.pcolormesh(longitude, latitude, masked, latlon=True)
    bmap.colorbar().set_label('Unit:none')

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))

    pngfile = "{0}.py.png".format(basename)
    plt.gcf().savefig(pngfile)
Example #29
0
def read_amsr_hdf4(filename):
    """Read AMSR lon/lat and cloud LWP (SD) plus Time (vdata) from HDF4.

    Returns:
        AmsrObject with longitude/latitude, lwp_mm and sec1993 populated
        in its all_arrays dict.
    """
    from pyhdf.SD import SD, SDC
    from pyhdf.HDF import HDF, HC
    import pyhdf.VS

    retv = AmsrObject()
    sd_file = SD(filename, SDC.READ)
    for sds in ["Longitude", "Latitude", "High_res_cloud"]:
        data = sd_file.select(sds).get()
        if sds in ["Longitude", "Latitude"]:
            retv.all_arrays[sds.lower()] = data.ravel()
        elif sds in ["High_res_cloud"]:
            # 'Scale' converts raw counts to liquid water path.
            lwp_gain = sd_file.select(sds).attributes()['Scale']
            retv.all_arrays["lwp_mm"] = data.ravel() * lwp_gain
    # Bug fix: close the SD interface before reopening with the low-level
    # HDF API — the original rebound the same name and leaked the handle.
    sd_file.end()

    # Bug fix: open with HC.READ (the HDF-interface constant); the
    # original passed SDC.READ and did not import HC at all.
    hdf_file = HDF(filename, HC.READ)
    vs = hdf_file.vstart()
    data_info_list = vs.vdatainfo()
    for item in data_info_list:
        # Each item describes a 1D compound/Vdata; item[0] is its name.
        name = item[0]
        if name in ["Time"]:
            data_handle = vs.attach(name)
            retv.all_arrays["sec1993"] = np.array(data_handle[:])
            data_handle.detach()
    hdf_file.close()
    return retv
Example #30
0
 def open_file(self, f):
     """Open *f* (a path or a numpy array) as an imageseries.

     Dispatches on the file extension: HDF4, HDF5, .npz frame-cache,
     .yml description, or a single image file in a directory.
     """
     # f could be either a file or numpy array
     ext = os.path.splitext(f)[1] if isinstance(f, str) else None
     if ext is None:
         ims = imageseries.open(None, 'array', data=f)
     elif ext in self.HDF4_FILE_EXTS:
         from pyhdf.SD import SD, SDC
         hdf = SD(f, SDC.READ)
         dset = hdf.select(self.path[1])
         ims = imageseries.open(None, 'array', data=dset)
     elif ext in self.HDF5_FILE_EXTS:
         data = h5py.File(f, 'r')
         dset = data['/'.join(self.path)][()]
         if dset.ndim < 3:
             # Handle raw two dimensional data.  Bug fix: close the h5py
             # handle on this branch too (dset is already in memory).
             data.close()
             ims = imageseries.open(None, 'array', data=dset)
         else:
             data.close()
             ims = imageseries.open(f,
                                    'hdf5',
                                    path=self.path[0],
                                    dataname=self.path[1])
     elif ext == '.npz':
         ims = imageseries.open(f, 'frame-cache')
     elif ext == '.yml':
         # NOTE: yaml.load without an explicit Loader is unsafe on
         # untrusted input — consider yaml.safe_load.  Bug fix: close the
         # file handle instead of leaking it.
         with open(f) as yml_file:
             data = yaml.load(yml_file)
         form = next(iter(data))
         ims = imageseries.open(f, form)
     else:
         # Treat anything else as a single image file in a directory,
         # described via a temporary image-files yaml.
         input_dict = {'image-files': {}}
         input_dict['image-files']['directory'] = os.path.dirname(f)
         input_dict['image-files']['files'] = os.path.basename(f)
         input_dict['options'] = {}
         input_dict['meta'] = {}
         temp = tempfile.NamedTemporaryFile(delete=False)
         try:
             data = yaml.dump(input_dict).encode('utf-8')
             temp.write(data)
             temp.close()
             ims = imageseries.open(temp.name, 'image-files')
         finally:
             # Ensure the file gets removed from the filesystem
             os.remove(temp.name)
     return ims