def test_1000m_to_250m(self):
        """test the 1 km to 250 meter interpolation facility
        """
        gfilename_hdf = "testdata/MOD03_A12278_113638_2012278145123.hdf"
        gfilename = "testdata/250m_lonlat_section_input.npz"
        result_filename = "testdata/250m_lonlat_section_result.npz"

        from pyhdf.SD import SD
        from pyhdf.error import HDF4Error
        
        gdata = None
        try:
            gdata = SD(gfilename_hdf)
        except HDF4Error:
            print "Failed reading eos-hdf file %s" % gfilename_hdf
            try:
                indata = np.load(gfilename)
            except IOError:
                return

        if gdata:
            lats = gdata.select("Latitude")[20:50, :]
            lons = gdata.select("Longitude")[20:50, :]
        else:
            lats = indata['lat'] / 1000.
            lons = indata['lon'] / 1000.

        verif = np.load(result_filename)
        vlons = verif['lon'] / 1000.
        vlats = verif['lat'] / 1000.
        tlons, tlats = modis1kmto250m(lons, lats)

        self.assertTrue(np.allclose(tlons, vlons, atol=0.05))
        self.assertTrue(np.allclose(tlats, vlats, atol=0.05))
Example #2
def load_standard_lfm_hdf(filename):
	""" Load the standard formated hdf which we want to emulate"""
	f = SD(filename, SDC.READ)
	X_grid = f.select('X_grid')
	Y_grid = f.select('Y_grid')
	Z_grid = f.select('Z_grid')

	# x_grid is size nkp1,njp1,nip1
	(nkp1,njp1,nip1) = X_grid[:].shape
	# The LFM reader expects i to vary fastest, then j, then k
	# However, the LFM pre-converted files store positions with k varying fastest (column-major)
	# Recommend saving in column-major format. If it fails, we can always switch.

	
	# i = 0; j = 0; k = 0
	# print 'printing standard first row'
	# for i in range(nip1):
	# 	print X_grid[k,j,i]/R_e

	# print 'printing j sweep'
	# i = 0; j = 0; k = 0;
	# for j in range(njp1):
	# 	print X_grid[k,j,i]/R_e

	# print 'printing k sweep'
	# i = 0; j = 0; k = 0;
	# for k in range(nkp1):
	# 	print X_grid[k,j,i]/R_e


	print('standard nip1,njp1,nkp1 =', nip1, njp1, nkp1)
	ni = nip1-1
	nj = njp1-1
	nk = nkp1-1
	print('standard ni,nj,nk =', ni, nj, nk)
Example #3
def read_var_point(filename,var_name,i,j,k,thetac,phic):
    thetac = thetac[j]
    phic   = phic[k]


    hdffile = SD(filename,SDC.READ)
    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi']:
        var=hdffile.select(var_name+'_').get(start=(k,j,i),count=(1,1,1)).squeeze()
    else:
#        R,theta,phi=r_theta_phi_uniform(filename)

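        # Rotate the Cartesian components into spherical ones using the unit
        # vectors at (thetac, phic):
        #   r_hat     = ( cos(phi)sin(theta),  sin(phi)sin(theta),  cos(theta))
        #   theta_hat = ( cos(phi)cos(theta),  sin(phi)cos(theta), -sin(theta))
        #   phi_hat   = (-sin(phi),            cos(phi),            0)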
        if var_name in ['br','btheta','bphi']:
            bx=hdffile.select('bx_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            by=hdffile.select('by_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            bz=hdffile.select('bz_').get(start=(k,j,i),count=(1,1,1)).squeeze()

            if var_name=='br':
                var     = bx*cos(phic)*sin(thetac) + by*sin(phic)*sin(thetac) + bz*cos(thetac)
            elif var_name=='btheta':
                var = bx*cos(phic)*cos(thetac) + by*sin(phic)*cos(thetac) - bz*sin(thetac)
            else:
                var   =-bx*sin(phic)            + by*cos(phic)
        else:
            vx=hdffile.select('vx_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            vy=hdffile.select('vy_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            vz=hdffile.select('vz_').get(start=(k,j,i),count=(1,1,1)).squeeze()

            if var_name=='vr':
                var    = vx*cos(phic)*sin(thetac) + vy*sin(phic)*sin(thetac) + vz*cos(thetac)
            elif var_name=='vtheta':
                var = vx*cos(phic)*cos(thetac) + vy*sin(phic)*cos(thetac) - vz*sin(thetac)
            else:
                var   =-vx*sin(phic)            + vy*cos(phic)
    hdffile.end()
    return var
Example #4
def readMOD35L2(fname, geoloc_only=False):
    hdf_file = SD(HDFDIR + fname)
    if not geoloc_only:
        cloud_mask = hdf_file.select('Cloud_Mask').get()
    lon = hdf_file.select('Longitude').get()
    lat = hdf_file.select('Latitude').get()
    hdf_file.end()
    
    if not geoloc_only:
        cld_msk = uint8(cloud_mask[0])
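        # Byte 0 of the MOD35 cloud mask packs several flags: bits 1-2 hold the
        # cloudiness determination and bits 6-7 the land/water flag, so ANDing
        # with 6 (0b00000110) and 192 (0b11000000) isolates those fields.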
        cloud = cld_msk & 6 # 0, 2, 4, 6
        land = cld_msk & 192 # 0, 64, 128, 192
    
        cloud[cloud==0] = 1 # 0 -> Cloud
        cloud[cloud!=1] = 0 # 2, 4, 6 -> No cloud

        coast = land.copy() # copy so masking coast does not also modify land
        coast[coast==64] = 1 # 64 -> Coast
        coast[coast!=1] = 0 # 0, 128, 192 -> Not coast

        land[land!=0] = 1 # 64, 128, 192 -> Land, 0 -> Water
        
        return lon, lat, cloud, land, coast

    return lon, lat
Example #5
    def test_5_to_1(self):
        """test the 5km to 1km interpolation facility
        """
        #gfilename = "testdata/MOD03_A12097_174256_2012097175435.hdf"
        gfilename = "/san1/test/data/modis/MOD03_A12097_174256_2012097175435.hdf"
        #filename = "testdata/MOD021km_A12097_174256_2012097175435.hdf"
        filename = "/san1/test/data/modis/MOD021km_A12097_174256_2012097175435.hdf"
        from pyhdf.SD import SD
        from pyhdf.error import HDF4Error

        try:
            gdata = SD(gfilename)
            data = SD(filename)
        except HDF4Error:
            print "Failed reading both eos-hdf files %s and %s" % (gfilename, filename)
            return
        
        glats = gdata.select("Latitude")[:]
        glons = gdata.select("Longitude")[:]
    
        lats = data.select("Latitude")[:]
        lons = data.select("Longitude")[:]
        
        tlons, tlats = modis5kmto1km(lons, lats)

        self.assertTrue(np.allclose(tlons, glons, atol=0.05))
        self.assertTrue(np.allclose(tlats, glats, atol=0.05))
Example #6
def read_var_islice(filename,var_name,i,thetac,phic):
    nk = phic.size
    nj = thetac.size
    phic = phic[:,None]
    thetac = thetac[None,:]
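    # Reshape phic into a column and thetac into a row so that the trig
    # factors below broadcast over the (nk, nj) slice returned by get().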

    hdffile = SD(filename,SDC.READ)
    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi']:
        var=hdffile.select(var_name+'_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
    else:
        if var_name in ['br','btheta','bphi']:
            bx=hdffile.select('bx_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            by=hdffile.select('by_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            bz=hdffile.select('bz_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()

            if var_name=='br':
                var     = bx*cos(phic)*sin(thetac) + by*sin(phic)*sin(thetac) + bz*cos(thetac)
            elif var_name=='btheta':
                var = bx*cos(phic)*cos(thetac) + by*sin(phic)*cos(thetac) - bz*sin(thetac)
            else:
                var   =-bx*sin(phic)            + by*cos(phic)
        else:
            vx=hdffile.select('vx_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            vy=hdffile.select('vy_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            vz=hdffile.select('vz_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()

            if var_name=='vr':
                var    = vx*cos(phic)*sin(thetac) + vy*sin(phic)*sin(thetac) + vz*cos(thetac)
            elif var_name=='vtheta':
                var = vx*cos(phic)*cos(thetac) + vy*sin(phic)*cos(thetac) - vz*sin(thetac)
            else:
                var   =-vx*sin(phic)            + vy*cos(phic)
    hdffile.end()
    return var
Example #7
    def test_1000m_to_250m(self):
        """Test the 1 km to 250 meter interpolation facility."""
        # gfilename = \
        #      "/san1/test/data/modis/MOD03_A12278_113638_2012278145123.hdf"
        gfilename = "/local_disk/src/python-geotiepoints/tests/MOD03_A12278_113638_2012278145123.hdf"
        # result_filename = \
        #      "/san1/test/data/modis/250m_lonlat_results.npz"
        result_filename = "/local_disk/src/python-geotiepoints/tests/250m_lonlat_results.npz"

        from pyhdf.SD import SD
        from pyhdf.error import HDF4Error

        try:
            gdata = SD(gfilename)
        except HDF4Error:
            print("Failed reading eos-hdf file %s" % gfilename)
            return

        lats = gdata.select("Latitude")[0:50, :]
        lons = gdata.select("Longitude")[0:50, :]

        verif = np.load(result_filename)
        vlons = verif['lons']
        vlats = verif['lats']
        tlons, tlats = modis1kmto250m(lons, lats)

        self.assertTrue(np.allclose(tlons, vlons, atol=0.05))
        self.assertTrue(np.allclose(tlats, vlats, atol=0.05))
Example #8
def run(FILE_NAME):

    DATAFIELD_NAME = 'dHat'

    if USE_NETCDF4:
        from netCDF4 import Dataset    
        nc = Dataset(FILE_NAME)
        var = nc.variables[DATAFIELD_NAME]
        # This datafield has scale factor and add offset attributes, but no
        # fill value.  We'll turn off automatic scaling and do it ourselves.
        var.set_auto_maskandscale(False)
        data = nc.variables[DATAFIELD_NAME][:].astype(np.float64)

        # Retrieve scale/offset attributes.
        scale_factor = var.scale_factor
        add_offset = var.add_offset
    
        # Retrieve the geolocation data.
        latitude = nc.variables['geolocation'][:,:,0]
        longitude = nc.variables['geolocation'][:,:,1]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        
        ds = hdf.select(DATAFIELD_NAME)
        data = ds[:,:].astype(np.double)

        # Handle scale/offset attributes.
        attrs = ds.attributes(full=1)
        sfa=attrs["scale_factor"]
        scale_factor = sfa[0]
        aoa=attrs["add_offset"]
        add_offset = aoa[0]

        # Retrieve the geolocation data.        
        geo = hdf.select('geolocation')
        latitude = geo[:,:,0]
        longitude = geo[:,:,1]

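    # Note the order of operations: this field defines the physical value as
    # data / scale_factor + add_offset, not the usual data * scale + offset.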
    data = data / scale_factor + add_offset
    
    # Draw an equidistant cylindrical projection using the high resolution
    # coastline database.
    m = Basemap(projection='cyl', resolution='h',
                llcrnrlat=30, urcrnrlat = 36,
                llcrnrlon=121, urcrnrlon = 133)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(30, 37), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(121, 133, 2), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, data, latlon=True)
    cb = m.colorbar()
    cb.set_label('Unit:mm')

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    # plt.show()
    
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #9
def run(FILE_NAME):

    DATAFIELD_NAME = 'Temperature_MW_A'
    if USE_NETCDF4:
        from netCDF4 import Dataset    
        nc = Dataset(FILE_NAME)

        # The variable has a fill value, 
        # so netCDF4 converts it to a float64 masked array for us.
        data = nc.variables[DATAFIELD_NAME][11,:,:]
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]

    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # List available SDS datasets.
        # print hdf.datasets()

        # Read dataset.
        data3D = hdf.select(DATAFIELD_NAME)
        data = data3D[11,:,:]

        # Read geolocation dataset.
        lat = hdf.select('Latitude')
        latitude = lat[:,:]
        lon = hdf.select('Longitude')
        longitude = lon[:,:]

        # Handle fill value.
        attrs = data3D.attributes(full=1)
        fillvalue=attrs["_FillValue"]

        # fillvalue[0] is the attribute value.
        fv = fillvalue[0]
        data[data == fv] = np.nan
        data = np.ma.masked_array(data, np.isnan(data))

    
    # Draw an equidistant cylindrical projection using the low resolution
    # coastline database.
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat = 90,
                llcrnrlon=-180, urcrnrlon = 180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180., 181., 45.), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, data, latlon=True, alpha=0.90)
    cb = m.colorbar()
    cb.set_label('Unit:K')
    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n {1} at TempPrsLvls=11'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.{1}.py.png".format(basename, DATAFIELD_NAME)
    fig.savefig(pngfile)
Example #10
    def setup_grid(self):
        """Setup necessary variables for grid """

        if not os.path.isfile(self.datadir + self.gridfile):
            urllib.urlretrieve(self.dataurl + self.gridfile,
                               self.datadir + self.gridfile)
        g = SD(self.datadir + self.gridfile, SDC.READ)
        self.llat = g.select('Latitude')[:]
        self.llon = g.select('Longitude')[:]
Example #11
def run(FILE_NAME):

    DATAFIELD_NAME = 'SurfaceTemperature'

    # The dataset is (6144 x 6400).  Subset it to around 1K x 1K.
    # Otherwise, the plot will skip processing some regions.
    rows = slice(0, 6144, 6)
    cols = slice(0, 6400, 6)

    if USE_NETCDF4:    
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)

        data = nc.variables[DATAFIELD_NAME][rows, cols]
    
        # Retrieve the geolocation data.
        latitude = nc.variables['Latitude'][rows, cols]
        longitude = nc.variables['Longitude'][rows, cols]
        
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[rows,cols]

        # Read geolocation dataset.
        lat = hdf.select('Latitude')
        latitude = lat[rows,cols]
        lon = hdf.select('Longitude')
        longitude = lon[rows,cols]
        

    # Apply the fill value.  The valid minimum is zero, although there's no
    # attribute.
    data[data < 0] = np.nan
    data = np.ma.masked_array(data, np.isnan(data))
    
    # Render the data in a lambert azimuthal equal area projection.
    m = Basemap(projection='nplaea', resolution='l',
                boundinglat=60, lon_0=43)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(50, 90, 10), labels=[1, 0, 0, 1])
    m.drawmeridians(np.arange(-180, 180, 30))
    x, y = m(longitude, latitude)
    m.pcolormesh(x, y, data)
    cb = m.colorbar()
    cb.set_label('Unknown')

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #12
def get_lat_lon_modis(satscene, options):
    """Read lat and lon.
    """
    filename_tmpl = satscene.time_slot.strftime(options["geofile"])
    file_list = glob.glob(os.path.join(options["dir"], filename_tmpl))

    if len(file_list) == 0:
        # Try in the same directory as the data
        data_dir = os.path.split(options["filename"])[0]
        file_list = glob.glob(os.path.join(data_dir, filename_tmpl))

    if len(file_list) > 1:
        logger.warning("More than 1 geolocation file matching!")
        filename = max(file_list, key=lambda x: os.stat(x).st_mtime)
        coarse_resolution = 1000
    elif len(file_list) == 0:
        logger.warning("No geolocation file matching " + filename_tmpl
                       + " in " + options["dir"])
        logger.debug("Using 5km geolocation and interpolating")
        filename = options["filename"]
        coarse_resolution = 5000
    else:
        filename = file_list[0]
        coarse_resolution = 1000

    logger.debug("Loading geolocation file: " + str(filename)
                 + " at resolution " + str(coarse_resolution))

    resolution = options["resolution"]

    data = SD(str(filename))
    lat = data.select("Latitude")
    fill_value = lat.attributes()["_FillValue"]
    lat = np.ma.masked_equal(lat.get(), fill_value)
    lon = data.select("Longitude")
    fill_value = lon.attributes()["_FillValue"]
    lon = np.ma.masked_equal(lon.get(), fill_value)

    if resolution == coarse_resolution:
        return lat, lon

    cores = options["cores"]

    from geotiepoints import modis5kmto1km, modis1kmto500m, modis1kmto250m
    logger.debug("Interpolating from " + str(coarse_resolution)
                 + " to " + str(resolution))
    if coarse_resolution == 5000:
        lon, lat = modis5kmto1km(lon, lat)
    if resolution == 500:
        lon, lat = modis1kmto500m(lon, lat, cores)
    if resolution == 250:
        lon, lat = modis1kmto250m(lon, lat, cores)

    return lat, lon
Example #13
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'bsst'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        # Subset the data to match the size of the swath geolocation fields.
        # Turn off autoscaling, we'll handle that ourselves due to non-standard
        # naming of the offset attribute.
        var = nc.variables[DATAFIELD_NAME]
        var.set_auto_maskandscale(False)
        lat = nc.variables['lat']
        lon = nc.variables['lon']
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        var = hdf.select(DATAFIELD_NAME)
        lat = hdf.select('lat')
        lon = hdf.select('lon')

    latitude = lat[::8]
    longitude = lon[::8]
    data = var[::8, ::8].astype(np.float64)
    
    # Apply the attributes.  By inspection, fill value is 0
    data[data==0] = np.nan
    data = data * var.scale_factor + var.add_off
    datam = np.ma.masked_array(data, mask=np.isnan(data))
    
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat=90,
                llcrnrlon=-180, urcrnrlon=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 91, 45))
    m.drawmeridians(np.arange(-180, 180, 45), labels=[True,False,False,True])
    m.pcolormesh(longitude, latitude, datam, latlon=True)

    cax = plt.axes([0.92, 0.3, 0.01, 0.4])
    cb = plt.colorbar(cax=cax)
    units = 'degrees-C'
    cb.set_label(units)    
    
    basename = os.path.basename(FILE_NAME)
    fig = plt.gcf()
    # plt.show()
    long_name = 'Sea Surface Temperature ('+DATAFIELD_NAME+')'
    fig.suptitle('{0}\n{1}'.format(basename, long_name))
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #14
def run(FILE_NAME):

    # Identify the HDF-EOS2 swath data file.
    DATAFIELD_NAME = 'radiances'

    if USE_NETCDF4:
        from netCDF4 import Dataset    
        nc = Dataset(FILE_NAME)
        data = nc.variables['radiances'][:,:,567]
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data3D = hdf.select(DATAFIELD_NAME)
        data = data3D[:,:,567]

        # Read geolocation dataset.
        lat = hdf.select('Latitude')
        latitude = lat[:,:]
        lon = hdf.select('Longitude')
        longitude = lon[:,:]
        

    
    # Replace the filled value with NaN, replace with a masked array.
    data[data == -9999] = np.nan
    datam = np.ma.masked_array(data, np.isnan(data))
    
 
    # Draw a polar stereographic projection using the low resolution coastline
    # database.
    m = Basemap(projection='spstere', resolution='l',
                boundinglat=-65, lon_0 = 180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-80., -50., 5.))
    m.drawmeridians(np.arange(-180., 181., 20.), labels=[1, 0, 0, 1])
    x, y = m(longitude, latitude)
    m.pcolormesh(x, y, datam)

    # See page 101 of "AIRS Version 5.0 Released Files Description" document [1]
    # for unit specification.
    units = 'mW/m**2/cm**-1/sr'
    cb = m.colorbar()
    cb.set_label('Unit:'+units)
    
    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n {1} at channel=567'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #15
def read_var(fname,varname,normalized=False):
    f     = SD(fname,SDC.READ)
    phi   = f.select('fakeDim0')[:]
    theta = f.select('fakeDim1')[:]
    r     = f.select('fakeDim2')[:]
    var   = f.select('Data-Set-2')[:]
    f.end()

    if normalized:
        return(phi,theta,r,var)
    else:
        return(phi,theta,r*mas_units['length'],var*mas_units[varname])
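# A minimal usage sketch (the file and variable names are hypothetical; it
# assumes the module-level mas_units dict knows the 'rho' scaling):
#
#     phi, theta, r, rho = read_var('rho002.hdf', 'rho')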
Example #16
def load_hdf_spec(filename,ix,iy,data_name='MCD43GF_CMG'):
    """
     Purpose:
        Simple hdf load file for MCD43GF files. Chose only specific indices to load 
    
    Input: 
        filename: full file path and name of the hdf file to load
        ix: array of indices to be loaded in the x direction
        iy: array of indices to be loaded in the y direction
        
    Output:
        dat: np.array of the requested data at the indices
    
    Keywords: 
        data_name: (defaults to MCD43GF_CMG) the name of the dataset to load within the filename
    
    Dependencies:
        numpy
        pyhdf
    
    Required files:
        filename
        
    Example:
        
        >>b = load_hdf_spec(fp+'MCD43GF_geo_shortwave_193_2007.hdf',[200,201,202],[503,504,505,506])
        >>b
        array([[ nan,  nan,  nan,  nan],
               [ nan,  nan,  nan,  nan],
               [ nan,  nan,  nan,  nan]])
        >>b.shape
        (3L, 4L)
        
    Modification History:
    
        Written (v1.0): Samuel LeBlanc, 2017-03-22, Santa Cruz, CA
    """
    import numpy as np
    from pyhdf.SD import SD, SDC
    hdf = SD(filename, SDC.READ)
    if hasattr(ix,'__len__'):
        if (len(ix)-1)<(ix[-1]-ix[0]):
            raise ValueError('ix is not a contiguous array')
        if (len(iy)-1)<(iy[-1]-iy[0]):
            raise ValueError('iy is not a contiguous array')
        dat = hdf.select(data_name).get(start=(ix[0],iy[0]),count=(ix[-1]-ix[0]+1,iy[-1]-iy[0]+1))
    else:
        dat = hdf.select(data_name).get(start=(ix,iy),count=(1,1))
    dat = dat.astype(float)
    dat[dat==32767] = np.nan
    dat = dat/1000.0
    return dat
Example #17
def run(FILE_NAME):

    DATAFIELD_NAME = 'binDIDHmean'

    if USE_NETCDF4:    
        from netCDF4 import Dataset    
        nc = Dataset(FILE_NAME)
        data = nc.variables[DATAFIELD_NAME][:].astype(np.float64)
    
        # Retrieve the geolocation data.
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.        
        ds = hdf.select(DATAFIELD_NAME)
        data = ds[:,:]

        # Read geolocation dataset.
        lat = hdf.select('Latitude')
        latitude = lat[:,:]
        lon = hdf.select('Longitude')
        longitude = lon[:,:]

    
    # The swath crosses the international dateline between row 6000 and 7000.
    # This causes the mesh to smear, so we'll adjust the longitude (modulus
    # 360 degrees, of course) both in the longitude array and in the basemap
    # definition to avoid that.
    longitude[longitude < -60] += 360
    
    # Draw an equidistant cylindrical projection using the low resolution
    # coastline database.
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat = 90,
                llcrnrlon=-60, urcrnrlon = 300)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180., 181., 45.), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, data, latlon=True)
    cb = m.colorbar()
    cb.set_label('Unit:none')


    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n {1}'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #18
def load_thin_modis(satscene, options):
    """Read modis data from file and load it into *satscene*.
    """
    filename = satscene.time_slot.strftime("thin_MYD021KM.A%Y%j.%H%M.005.NRT.hdf")
    filename = os.path.join(options["dir"], filename)
    
    data = SD(filename)

    datasets = ['EV_250_Aggr1km_RefSB',
                'EV_500_Aggr1km_RefSB',
                'EV_1KM_RefSB',
                'EV_1KM_Emissive']

    for dataset in datasets:
        subdata = data.select(dataset)
        band_names = subdata.attributes()["band_names"].split(",")
        if len(satscene.channels_to_load & set(band_names)) > 0:
            uncertainty = data.select(dataset+"_Uncert_Indexes")
            if dataset == 'EV_1KM_Emissive':
                array = calibrate_tb(subdata, uncertainty)
            else:
                array = calibrate_refl(subdata, uncertainty)
            for (i, band) in enumerate(band_names):
                if band in satscene.channels_to_load:
                    satscene[band] = array[i]

    mda = data.attributes()["CoreMetadata.0"]
    orbit_idx = mda.index("ORBITNUMBER")
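    # The orbit number sits at a fixed offset within the ORBITNUMBER metadata
    # object; slice out its five-digit value.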
    satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116]

    lat, lon = get_lat_lon(satscene, None)
    from pyresample import geometry
    satscene.area = geometry.SwathDefinition(lons=lon, lats=lat)

    # trimming out dead sensor lines
    if satscene.satname == "aqua":
        for band in ["6", "27"]:
            if not satscene[band].is_loaded() or satscene[band].data.mask.all():
                continue
            width = satscene[band].data.shape[1]
            height = satscene[band].data.shape[0]
            indices = satscene[band].data.mask.sum(1) < width
            if indices.sum() == height:
                continue
            satscene[band] = satscene[band].data[indices, :]
            satscene[band].area = geometry.SwathDefinition(
                lons=satscene.area.lons[indices,:],
                lats=satscene.area.lats[indices,:])
            satscene[band].area.area_id = ("swath_" + satscene.fullname + "_"
                                           + str(satscene.time_slot) + "_"
                                           + str(satscene[band].shape) + "_"
                                           + str(band))
Example #19
def callback(body, message):
    """Do actual work."""

    logger.info("body in callback() is %s" % body)

    # pull lat/lon, time
    path = body
    sd = SD(path)
    lat = N.array(sd.select('Latitude').get())
    lon = N.array(sd.select('Longitude').get())
    t = N.array(sd.select('Time').get())
    sd.end()
    #logger.info("lat: %s" % str(lat.shape))
    #logger.info("lon: %s" % str(lon.shape))
    #logger.info("time: %s" % str(t.shape))

    # build metadata json
    id = os.path.basename(path)
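    # The granule footprint: walk the four corners of the 45x30 lat/lon grid
    # and close the ring back at the first corner.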
    md = {
        "id": id,
        "dataset": "AIRX2RET",
        "starttime": t[0,0],
        "endtime": t[44,29],
        "location": {
            "coordinates": [[
                [ lon[0,0], lat[0,0] ],
                [ lon[0,29], lat[0,29] ],
                [ lon[44,29], lat[44,29] ],
                [ lon[44,0], lat[44,0] ],
                [ lon[0,0], lat[0,0] ],
            ]], 
            "type": "polygon"
        }, 
        "urls": "http://mozart/data/public/products/%s" % id
    }

    # publish
    pub_dir = '/data/public/products'
    ensure_dir(pub_dir)
    shutil.move(path, os.path.join(pub_dir, id))

    # insert into ElasticSearch
    index = doctype = 'airs'
    conn = ES('http://localhost:9200')
    mapping = json.load(open('grq_mapping.json'))
    if not conn.indices.exists_index(index):
        conn.indices.create_index(index, mapping)
    conn.indices.put_mapping(doctype, mapping, index)
    ret = conn.index(md, index, doctype, md['id'])

    message.ack()
Example #20
def run(FILE_NAME):

    DATAFIELD_NAME = 'CO Profiles Day'

    if USE_NETCDF4:
        from netCDF4 import Dataset
    
        nc = Dataset(FILE_NAME)

        data = nc.variables[DATAFIELD_NAME][:, :, 1].astype(np.float64)
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
        pressure = nc.variables['Pressure Grid'][:]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data3D = hdf.select(DATAFIELD_NAME)
        data = data3D[:, :, 1].astype(np.float64)

        # Read coordinates.
        latitude = hdf.select('Latitude')[:]
        longitude = hdf.select('Longitude')[:]
        pressure = hdf.select('Pressure Grid')[:]


    # Replace the fill value with NaN
    data[data == -9999] = np.nan
    data = np.ma.masked_array(data, np.isnan(data))
    
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat=90,
                llcrnrlon=-180, urcrnrlon=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 91, 45), labels=[True,False,False,True])
    m.drawmeridians(np.arange(-180, 180, 45), labels=[True,False,False,True])
    m.pcolormesh(longitude, latitude, data, latlon=True)
    cb = m.colorbar()
    cb.set_label('ppbv')

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1} at Pressure={2} hPa'.format(basename, DATAFIELD_NAME, pressure[1]))

    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #21
def main():

    varname_to_rpn_name = {
        "precipitation": "PR",
        "relativeError": "RERR"
    }

    varnames = list(varname_to_rpn_name.keys())

    target_dir = "/skynet3_rech1/huziy/from_hdf4"
    source_dir = "/st1_fs2/winger/Validation/TRMM/HDF_format"

    for f_name in os.listdir(source_dir):
        if not f_name.endswith("HDF"):
            continue

        path = os.path.join(source_dir, f_name)
        ds = SD(path)
        print(ds.datasets())
        target_path = os.path.join(target_dir, f_name + ".rpn")
        r_obj = RPN(target_path, mode="w")
        for varname in varnames:
            var_data = ds.select(varname)[0, :, :]
            r_obj.write_2D_field(
                name=varname_to_rpn_name[varname],
                data=var_data, label=varname, grid_type="L",
                ig = [25, 25, 4013, 18012])
        r_obj.close()
Example #22
def rainfall_anunal_GMX(year):

    file = glob.glob('/Users/yuewang/Documents/DATA/atl/ATL_3B42V7_rain_accum.'+ str(year)+'*')

    rainfall_0 = []
    for i in file:
        atl = SD(i, SDC.READ)
        rainfall = atl.select('RAIN_TOTAL')
        rainfall_value = rainfall.get()
        rainfall_0.append(rainfall_value)
    
    rainfall_single = np.array(rainfall_0)
    rainfall_anunal = sum(rainfall_single)
    rainfall_anunal_GMX = rainfall_anunal[280:320,340:400]
    
    ind = np.where(rainfall_anunal_GMX != 0)
    rf_annual = []
    for i,j in zip(*ind):
        mm = rainfall_anunal_GMX[i,j]
        rf_annual.append(mm)
    rf_annual = np.array(rf_annual)
    
    c = np.mean(rf_annual)
    
    return c
Example #23
def read_rrc(inpath):
    '''Read m x n rrc data from an hdf file.

    Bands 1-5 and 13-16 give the MODIS Rrc; Rrc_1238, Rrc_443-862,
    ozone, senz and solz are used for the VIIRS rrc.
    '''
    hdf = SD(inpath, SDC.READ)
    #dts = sorted(hdf.datasets().keys())
    modis_key = ['CorrRefl_01','CorrRefl_02','CorrRefl_03','CorrRefl_04','CorrRefl_05',
                 'CorrRefl_13','CorrRefl_14','CorrRefl_15','CorrRefl_16']
    viirs_key = ['Rrc_443','Rrc_486','Rrc_551','Rrc_671','Rrc_745','Rrc_862','Rrc_1238']
    mission = os.path.basename(inpath)[0]
    if mission == 'A' or mission == 'T':
        keys = modis_key
    elif mission == 'V':
        keys = viirs_key
    else:
        keys = hdf.datasets().keys()
    for i,dt in enumerate(keys):
        print(i,dt)
        band = hdf.select(dt)[:,:]        
        if i==0:             
            limit = (band.shape[0],band.shape[1],len(keys))            
            rrc = np.zeros(limit, dtype=float)
            rrc[:,:,i] = band
        else:
            rrc[:,:,i] = band
    hdf.end()
    print(rrc.shape)
    return rrc
Example #24
def main(cal_file, with_cp):

    from pyhdf.SD import SD

    if with_cp:
        cmd = 'cp %s /home/noel/scratch/' % (cal_file)
        print "running "+cmd
        os.system(cmd)
        filename = os.path.basename(cal_file)
        cal_file = '/home/noel/scratch/' + filename
                        
    print('Reading ' + cal_file)
    
    vars = ['Latitude', 'Longitude', 
            'Total_Attenuated_Backscatter_532', 'Attenuated_Backscatter_1064', 'Perpendicular_Attenuated_Backscatter_532',
            'Pressure', 'Temperature', 'Molecular_Number_Density', 'Tropopause_Height', 'Surface_Elevation']
    
    hdf = SD(cal_file)
    for var in vars:
        print('Reading ' + var)
        hdf_var = hdf.select(var)
        data = hdf_var.get()
        hdf_var.endaccess()
    hdf.end()
    
    print('ok.')
    if with_cp:
        print('Removing ' + filename)
        cmd = 'rm -f /home/noel/scratch/' + filename
        os.system(cmd)
Example #25
def rainfall_anunal_car(year):

    file = glob.glob('/Users/yuewang/Documents/DATA/atl/ATL_3B42V7_rain_accum.'+ str(year)+'*')

    rainfall_0 = []
    for i in file:
        atl = SD(i, SDC.READ)
        rainfall = atl.select('RAIN_TOTAL')
        rainfall_value = rainfall.get()
        rainfall_0.append(rainfall_value)
    
    rainfall_single = np.array(rainfall_0)
    rainfall_anunal = sum(rainfall_single)
    rainfall_anunal_car = rainfall_anunal[238:286,372:476]
    
# compute the mean over the non-zero values
    ind = np.where(rainfall_anunal_car != 0)
    rf_annual = []
    for i,j in zip(*ind):
        mm = rainfall_anunal_car[i,j]
        rf_annual.append(mm)
    rf_annual = np.array(rf_annual)
    
    d = np.mean(rf_annual)
    
    return d
Example #26
 def load(self, fldname, **kwargs):
     """ Load Cali Current fields for a given day"""
     self._timeparams(**kwargs)
     
     if fldname == 'chl':
         filename = "/C%04i%03i_chl_mapped.hdf" % (self.yr, self.yd)
         #ncfieldname = 'chl_%04i_%03i' % (yr,yd)
         def scale(PV): return 10**(PV*0.015-2)
     elif fldname == 'sst':
         filename = "/M%04i%03i_sst_mapped.hdf" % (self.yr, self.yd)
         #ncfieldname = 'sst_%04i_%03i' % (yr,yd)            
         def scale(PV): return PV*0.15000001-3
     if not os.path.isfile(self.datadir + filename):
         print("Downloading " + filename)
         self.download(fldname, self.jd)

     h = SD(self.datadir + filename, SDC.READ)
     ncfieldname = list(h.datasets().keys())[0]
     fld = h.select(ncfieldname)
     attr = fld.attributes()
     PV = fld[:].astype(float)
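     # The mapped product stores unsigned bytes: undo the signed
     # interpretation, then treat 0 and 255 as missing before scaling.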
     PV[PV<0] = PV[PV<0]+256
     PV[PV==0]   = np.nan
     PV[PV==255] = np.nan
     setattr(self, fldname, scale(PV)[self.j1:self.j2, self.i1:self.i2])
Example #27
def read_amsr_hdf4(filename):
    from pyhdf.SD import SD, SDC
    from pyhdf.HDF import HDF, HC
    import pyhdf.VS 

    retv = AmsrObject()
    h4file = SD(filename, SDC.READ)
    datasets = h4file.datasets()
    attributes = h4file.attributes()
    #for idx,attr in enumerate(attributes.keys()):
    #    print idx, attr
    for sds in ["Longitude", "Latitude", "High_res_cloud"]:
        data = h4file.select(sds).get()
        if sds in ["Longitude", "Latitude"]:
            retv.all_arrays[sds.lower()] = data.ravel()
        elif sds in ["High_res_cloud"]:
            lwp_gain = h4file.select(sds).attributes()['Scale']
            retv.all_arrays["lwp_mm"] = data.ravel() * lwp_gain

        #print h4file.select(sds).info()
    h4file = HDF(filename, HC.READ)
    vs = h4file.vstart()
    data_info_list = vs.vdatainfo()
    #print "1D data compound/Vdata"
    for item in data_info_list:
        #1D data compound/Vdata
        name = item[0]
        #print name
        if name in ["Time"]:
            data_handle = vs.attach(name)
            data = np.array(data_handle[:])
            retv.all_arrays["sec1993"] = data 
            data_handle.detach()
        else:
            pass
            #print name
        #data = np.array(data_handle[:])
        #attrinfo_dic = data_handle.attrinfo()
        #factor = data_handle.findattr('factor')
        #offset = data_handle.findattr('offset')
        #print data_handle.factor
        #data_handle.detach()
    #print data_handle.attrinfo()
    h4file.close()
    #for key in retv.all_arrays.keys():
    #    print key, retv.all_arrays[key]
    return retv
Example #28
    def read(self, filename, **kwargs):
        """Read the data"""
        from pyhdf.SD import SD
        import datetime

        #print "*** >>> Read the hdf-eos file!"
        root = SD(filename)
    
        # Get all the Attributes:
        # Common Attributes, Data Time,
        # Data Structure and Scene Coordinates
        for key in root.attributes().keys():
            self._eoshdf_info[key] = root.attributes()[key]

        # Start Time - datetime object
        starttime = datetime.datetime.strptime(self._eoshdf_info['Start Time'][0:13], 
                                               "%Y%j%H%M%S")
        msec = float(self._eoshdf_info['Start Time'][13:16])/1000.
        self.starttime = starttime + datetime.timedelta(seconds=msec)
    
        # End Time - datetime object
        endtime = datetime.datetime.strptime(self._eoshdf_info['End Time'][0:13], 
                                             "%Y%j%H%M%S")
        msec = float(self._eoshdf_info['End Time'][13:16])/1000.
        self.endtime = endtime + datetime.timedelta(seconds=msec)

        # What is the leading 'H' doing here?
        sensor_name = self._eoshdf_info['Sensor Name'][1:-1].lower()
        try:
            self.satid = EOS_SATELLITE[sensor_name]
        except KeyError:
            LOG.error("Failed setting the satellite id - sat-name = ", 
                      sensor_name)
            
        self.orbit = self._eoshdf_info['Orbit Number']
        self.shape = (self._eoshdf_info['Number of Scan Control Points'],
                      self._eoshdf_info['Number of Pixel Control Points'])

        #try:
        if 1:
            value = root.select(self.name)
            attr = value.attributes()
            data = value.get()

            self.attr = attr
            band = data
            if self.name in FLAGS_QUALITY:
                self.data = band
            else:
                nodata = attr['bad_value_scaled']
                self.data = (np.ma.masked_equal(band, nodata) * 
                             attr['slope'] + attr['intercept'])
            
            value.endaccess()
        #except:
        #    pass

        root.end()
        self.filled = True
Example #29
def run(FILE_NAME):
    # Identify the data field.
    DATAFIELD_NAME = 'Longwave Flux (2.5R)'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        data = nc.variables[DATAFIELD_NAME][:].astype(np.float64)
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        
        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:,:]


    # Set fillvalue and units.
    # See "CERES Data Management System ES-4 Collection Guide" [1] and a sample
    # image by NASA [2] for details.  The fillvalue is 3.4028235E38.  Here, we
    # just use the max of the data.
    fillvalue = np.max(data)
    data[data == fillvalue] = np.nan
    datam = np.ma.masked_array(data, mask=np.isnan(data))
    
    units = 'Watts/Meter^2'
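    # The ES-4 grid carries no explicit geolocation, so build cell-centered
    # global lon/lat axes from the shape of the data array.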
    ysize, xsize = data.shape
    xinc = 360.0 / xsize
    yinc = 180.0 / ysize
    x0, x1 = (-180, 180)
    y0, y1 = (-90, 90)
    longitude = np.linspace(x0 + xinc/2, x1 - xinc/2, xsize)
    latitude = np.linspace(y0 + yinc/2, y1 - yinc/2, ysize)
    
    # Flip the latitude to run from 90 to -90
    latitude = latitude[::-1]
    
    # The data is global, so render in a global projection.
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat=90,
                llcrnrlon=-180, urcrnrlon=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90.,90,45))
    m.drawmeridians(np.arange(-180.,180,45), labels=[True,False,False,True])
    m.pcolormesh(longitude, latitude, datam, latlon=True)
    cb = m.colorbar()

    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #30
def get_lat_lon_thin_modis(satscene, options):
    """Read lat and lon.
    """
    filename = satscene.time_slot.strftime("thin_MYD03.A%Y%j.%H%M.005.NRT.hdf")
    filename = os.path.join(options["dir"], filename)

    data = SD(filename)
    lat = data.select("Latitude")
    fill_value = lat.attributes()["_FillValue"]
    lat = np.ma.masked_equal(lat.get(), fill_value)
    lon = data.select("Longitude")
    fill_value = lon.attributes()["_FillValue"]
    lon = np.ma.masked_equal(lon.get(), fill_value)

    

    return lat, lon
Example #31
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'sur_refl_b01_1'

    if USE_GDAL:
        import gdal
        GRID_NAME = 'MODIS_Grid_500m_2D'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(
            FILE_NAME, GRID_NAME, DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)

        # Construct the grid.
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        x = np.linspace(x0, x0 + xinc * nx, nx)
        y = np.linspace(y0, y0 + yinc * ny, ny)
        xv, yv = np.meshgrid(x, y)

        # In basemap, the sinusoidal projection is global, so we won't use it.
        # Instead we'll convert the grid back to lat/lons.
        sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
        wgs84 = pyproj.Proj("+init=EPSG:4326")
        lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

        # Read the attributes.
        meta = gdset.GetMetadata()
        long_name = meta['long_name']
        units = meta['units']
        _FillValue = float(meta['_FillValue'])
        scale_factor = float(meta['scale_factor'])
        valid_range = [float(x) for x in meta['valid_range'].split(', ')]

        del gdset
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.double)

        # Read geolocation dataset from HDF-EOS2 dumper output.
        GEO_FILE_NAME = 'lat_MOD09GA.A2007268.h10v08.005.2007272184810_MODIS_Grid_500m_2D.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        lat = lat.reshape(data.shape)

        GEO_FILE_NAME = 'lon_MOD09GA.A2007268.h10v08.005.2007272184810_MODIS_Grid_1km_2D.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        lon = lon.reshape(data.shape)

        # Read attributes.
        attrs = data2D.attributes(full=1)
        lna = attrs["long_name"]
        long_name = lna[0]
        vra = attrs["valid_range"]
        valid_range = vra[0]
        fva = attrs["_FillValue"]
        _FillValue = fva[0]
        sfa = attrs["scale_factor"]
        scale_factor = sfa[0]
        ua = attrs["units"]
        units = ua[0]

    invalid = np.logical_or(data > valid_range[1], data < valid_range[0])
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = data / scale_factor
    data = np.ma.masked_array(data, np.isnan(data))

    m = Basemap(projection='cyl',
                resolution='h',
                llcrnrlat=-2.5,
                urcrnrlat=12.5,
                llcrnrlon=-82.5,
                urcrnrlon=-67.5)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(0, 15, 5), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-80, -65, 5), labels=[0, 0, 0, 1])
    m.pcolormesh(lon, lat, data, latlon=True)
    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.sur_refl_b01_1.py.png".format(basename)
    fig.savefig(pngfile)
Example #32
import cartopy.crs as ccrs
import numpy as np
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
FILE_NAME = 'D:/f_work/E_public/E1_wangwei/data/MOD16A2/MOD16A2.A2001009.h28v06.006.2017068170401.hdf'

from pyhdf.SD import SD, SDC
#from pyhdf.SD import SD, SDC
hdf = SD(FILE_NAME, SDC.READ)

# Get the global attribute dictionary
attr = hdf.attributes(full=1)
# Get the dataset dictionary
tables = hdf.datasets()
DATAFIELD_NAME = tuple(tables.keys())[0]
# Read dataset.
data_raw = hdf.select(DATAFIELD_NAME)
data = data_raw[:, :].astype(np.double)

# Read lat/lon.
xdim = hdf.select('XDim')
lon = xdim[:].astype(np.double)

ydim = hdf.select('YDim')
lat = ydim[:].astype(np.double)

# Retrieve attributes.
attrs = data_raw.attributes(full=1)
lna = attrs["long_name"]
long_name = lna[0]
aoa = attrs["add_offset"]
add_offset = aoa[0]
Example #33
	def cimport_data(self, idata_name=('V50M', 'U50M')):
		'''Import ten-year data. 
		Args:
			idata_name: the data name.
		Returns: 
			self.dict_ref_wind: imported data.
		'''
		year_index = range(self.start_year, self.end_year + 1)
		site_num = len(self.site_index)
		zero = str(0)
		for each_siteth in range(1, site_num + 1):
			self.dict_ref_wind[each_siteth] = {}
			for each_year in year_index:
				self.dict_ref_wind[each_siteth][each_year] = {}
				for each_month in WindData.month_name:
					self.dict_ref_wind[each_siteth][each_year][each_month] = np.empty((0,24), np.float32)
		for each_year in year_index:
			if ((each_year % 400 == 0) or ((each_year % 4 == 0) and (each_year % 100 != 0))):
				year_feature = WindData.leap_year
			else:
				year_feature = WindData.nonleap_year
			for each_month in WindData.month_name:
				if ((each_year == 2010) and (each_month in WindData.spl_month2010)):
					fnames = self.file_name + WindData.fnamesa
				else:
					fnames = self.file_name + WindData.fnamesb 
				day_s = year_feature[each_month][1]
				day_e = year_feature[each_month][2]
				if year_feature[each_month][3]:
					for each_day in range(day_s, day_e + 1):
						fname = fnames + str(each_year) + zero + str(each_day) + WindData.fnamee
						print(fname)
						fid = SD(fname)
						tmpv = fid.select(idata_name[0])[:, :, :]
						tmpu = fid.select(idata_name[1])[:, :, :]
						fid.end()
						for each_siteth in range(1, site_num + 1):
							tmpv_site = tmpv[:, self.site_index[each_siteth - 1][0], \
							self.site_index[each_siteth - 1][1]].reshape(1, -1)
							tmpu_site = tmpu[:, self.site_index[each_siteth - 1][0], \
							self.site_index[each_siteth - 1][1]].reshape(1, -1)
							tmpwind_site = self.cuv2speed(tmpv_site, tmpu_site)
							self.dict_ref_wind[each_siteth][each_year][each_month] = \
							np.vstack((self.dict_ref_wind[each_siteth][each_year][each_month], tmpwind_site))
				else:
					for each_day in range(day_s, day_e + 1):
						fname = fnames + str(each_year) + str(each_day) + WindData.fnamee
						print(fname)
						fid = SD(fname)
						tmpv = fid.select(idata_name[0])[:, :, :]
						tmpu = fid.select(idata_name[1])[:, :, :]
						fid.end()
						for each_siteth in range(1, site_num + 1):
							tmpv_site = tmpv[:, self.site_index[each_siteth - 1][0], \
							self.site_index[each_siteth-1][1]].reshape(1, -1)
							tmpu_site = tmpu[:, self.site_index[each_siteth - 1][0], \
							self.site_index[each_siteth-1][1]].reshape(1, -1)
							tmpwind_site = self.cuv2speed(tmpv_site,tmpu_site)
							self.dict_ref_wind[each_siteth][each_year][each_month] = \
							np.vstack((self.dict_ref_wind[each_siteth][each_year][each_month], tmpwind_site))
		return self.dict_ref_wind
Example #34
def run(FILE_NAME):
    GEO_FILE_NAME = 'MYD03.A2002226.0000.005.2009193071127.hdf'
    GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME)

    DATAFIELD_NAME = 'EV_Band26'

    if USE_NETCDF4:    
        from netCDF4 import Dataset    
        nc = Dataset(FILE_NAME)

        # Read dataset.
        data = nc.variables[DATAFIELD_NAME][:,:].astype(np.float64)

        # Retrieve the geolocation data from MYD03 product.
        nc_geo = Dataset(GEO_FILE_NAME)
        longitude = nc_geo.variables['Longitude'][:]
        latitude = nc_geo.variables['Latitude'][:]
        
        # Retrieve attributes.
        units = nc.variables[DATAFIELD_NAME].radiance_units
        long_name = nc.variables[DATAFIELD_NAME].long_name

        # The scale and offset attributes do not have standard names in this 
        # case, so we have to apply the scaling equation ourselves.
        scale_factor = nc.variables[DATAFIELD_NAME].radiance_scales
        add_offset = nc.variables[DATAFIELD_NAME].radiance_offsets
        valid_range = nc.variables[DATAFIELD_NAME].valid_range
        _FillValue = nc.variables[DATAFIELD_NAME]._FillValue
        valid_min = valid_range[0]
        valid_max = valid_range[1]


    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:,:].astype(np.double)

        hdf_geo = SD(GEO_FILE_NAME, SDC.READ)

        # Read geolocation dataset from MOD03 product.
        lat = hdf_geo.select('Latitude')
        latitude = lat[:,:]
        lon = hdf_geo.select('Longitude')
        longitude = lon[:,:]
        
        # Retrieve attributes.
        attrs = data2D.attributes(full=1)
        lna=attrs["long_name"]
        long_name = lna[0]
        aoa=attrs["radiance_offsets"]
        add_offset = aoa[0]
        fva=attrs["_FillValue"]
        _FillValue = fva[0]
        sfa=attrs["radiance_scales"]
        scale_factor = sfa[0]
        vra=attrs["valid_range"]
        valid_min = vra[0][0]        
        valid_max = vra[0][1]        
        ua=attrs["radiance_units"]
        units = ua[0]

    invalid = np.logical_or(data > valid_max,
                            data < valid_min)
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = (data - add_offset) * scale_factor 
    data = np.ma.masked_array(data, np.isnan(data))
    
    # The data is close to the equator in Africa, so a global projection is
    # not needed.
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-5, urcrnrlat=30, llcrnrlon=5, urcrnrlon=45)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(0, 50, 10), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(0, 50., 10), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, data, latlon=True)
    cb=m.colorbar()
    cb.set_label(units, fontsize=8)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, 'Radiance derived from ' + long_name), fontsize=11)
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.evband26.py.png".format(basename)
    fig.savefig(pngfile)
Example #35
# Batch-read the HDF files, extract the AOD values, and collect the results
file_name = os.listdir(file_path)  # file names

i = 0
for hdf in file_name:
    i = i + 1
    HDF_FILE_URL = file_path + hdf
    file = SD(HDF_FILE_URL)
    data_sets_dic = file.datasets()
    '''
    # print the dataset names
    for idx, sds in enumerate(data_sets_dic.keys()):
       print(idx, sds)
    '''
    sds_obj1 = file.select('Longitude')  # select longitude
    sds_obj2 = file.select('Latitude')  # select latitude
    sds_obj3 = file.select('Optical_Depth_Land_And_Ocean')  # highest-quality AOD dataset
    longitude = sds_obj1.get()  # 读取数据
    latitude = sds_obj2.get()
    aod = sds_obj3.get()
    writer = pd.ExcelWriter(output_file_path + '%s.xlsx' % hdf)

    longitude = pd.DataFrame(longitude)  # convert to DataFrame format
    latitude = pd.DataFrame(latitude)
    aod = pd.DataFrame(aod)

    longitude.to_excel(writer, sheet_name='longitude')
    latitude.to_excel(writer, sheet_name='latitude')
    aod.to_excel(writer, sheet_name='aod')
    writer.close()
Example #36
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Snow_Cover'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        # Subset the data to match the size of the swath geolocation fields.
        rows = slice(5, 4060, 10)
        cols = slice(5, 2708, 10)
        data = nc.variables['Snow_Cover'][rows, cols]
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read geolocation dataset from HDF-EOS2 dumper output.
        GEO_FILE_NAME = 'lat_MOD10_L2.A2000065.0040.005.2008235221207.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        latitude = lat.reshape(data.shape)

        GEO_FILE_NAME = 'lon_MOD10_L2.A2000065.0040.005.2008235221207.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        longitude = lon.reshape(data.shape)

    # Draw a polar stereographic projection using the low resolution coastline
    # database.
    m = Basemap(projection='npstere', resolution='l', boundinglat=64, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(60., 81, 10.))
    m.drawmeridians(np.arange(-180., 181., 30.),
                    labels=[True, False, False, True])

    # Use a discretized colormap since we have only two levels.
    cmap = mpl.colors.ListedColormap(['grey', 'mediumblue'])
    bounds = [0, 19.5, 39]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
    m.pcolormesh(longitude, latitude, data, latlon=True, cmap=cmap, norm=norm)

    # Must reset the alpha level to opaque for the colorbar.
    # See http://stackoverflow.com/questions/4478725/...
    # .../partially-transparent-scatter-plot-but-with-a-solid-color-bar
    color_bar = plt.colorbar()
    color_bar.set_alpha(1)
    color_bar.set_ticks([9.75, 29.25])
    color_bar.set_ticklabels(['missing data', 'ocean'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = 'Snow Cover'
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #37
def hdf_read_example():
    "hdf_read_example(): function to read a HDF file, take an extract and plot it in a graph. Default is ifile and sdsName"

    ifile = 'C:\\Users\\Janie\\Downloads\\MOD09GA.A2004154.h19v10.005.2008152141836.hdf'
    sdsName = 'sur_refl_b01_1'
    ofile = ''

    xmin = 0
    xmax = 2000
    ymin = 0
    ymax = 2000

    # default plot value
    plot = 1

    if options.ifile:
        ifile = options.ifile

    if options.ofile:
        ofile = options.ofile

    if options.xmin:
        xmin = options.xmin

    if options.xmax:
        xmax = options.xmax

    if options.ymin:
        ymin = options.ymin

    if options.ymax:
        ymax = options.ymax

    if options.sdsName:
        sdsName = options.sdsName

    # read sds
    md = SD(ifile, SDC.READ)
    sds = md.select(sdsName)

    if options.v:
        # o/p file datasets
        sys.stderr.write('ifile: %s\n' % (ifile))
        print(md.datasets())

    if options.plot:
        # o/p file datasets
        plot = 1

    if plot:
        ex = sds[xmin:xmax, ymin:ymax]
        np.clip(ex, 0., 10000, out=ex)
        imshow(ex)
        plt.colorbar(drawedges=True)
        plt.title('%s' % (sdsName))
        plt.ylabel("Row")
        plt.xlabel("Col")
        if ofile:
            # will save as emf, eps, pdf, png, ps, raw, rgba, svg, svgz
            plt.savefig(ofile)
        else:
            plt.show()
Example #38
# In[4]:


from a301.scripts import week5_test
week5_test.main()


# In[5]:


# Read the lats and lons from the MYD03 file
generic_rad = a301.data_dir / Path('rad_file_2018_10_1.hdf')
generic_m3 = a301.data_dir / Path('m3_file_2018_10_1.hdf')
print(f'reading {generic_m3}')
m3_file = SD(str(generic_m3), SDC.READ)
lats = m3_file.select('Latitude').get()
lons = m3_file.select('Longitude').get()
m3_file.end()


# In[6]:


#Read ch30 from the generic_rad file
rad_file = SD(str(generic_rad), SDC.READ)
ch30 = rad_file.select('ch30').get()
rad_file.end()


# # Calculate chan 30 brightness temperature
# 
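
# A hedged sketch of this step (not from the original notebook): invert the
# Planck function at the channel 30 central wavelength (~9.73 um), assuming
# ch30 already holds calibrated radiance in W m-2 um-1 sr-1.

import numpy as np

def planck_invert(radiance, wavelength=9.73):
    """Brightness temperature (K) from radiance via the inverse Planck function."""
    c1 = 1.191042e8   # 2*h*c**2 in W m-2 sr-1 um**4
    c2 = 1.4387752e4  # h*c/k_B in um K
    return c2 / (wavelength * np.log(c1 / (wavelength**5 * radiance) + 1.0))

Tbright = planck_invert(ch30)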
Example #39
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'NDSI_Snow_Cover'
    
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        # Subset the data to match the size of the swath geolocation fields.
        rows = slice(5, 4060, 10)
        cols = slice(5, 2708, 10)
        data = nc.variables[DATAFIELD_NAME][rows, cols]
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
        # Read the long_name attribute here too, so it is defined in both
        # branches when the title is drawn below.
        long_name = nc.variables[DATAFIELD_NAME].long_name
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        # Use the following for low resolution.
        # This is good for low memory machine.
        rows = slice(5, 4060, 10)
        cols = slice(5, 2708, 10)
        data = data2D[rows, cols]
        latitude = hdf.select('Latitude')[:]
        longitude = hdf.select('Longitude')[:]
        
        # Read dataset attribute.
        attrs = data2D.attributes(full=1)
        lna = attrs["long_name"]
        long_name= lna[0]

        # Use the following for high resolution.
        # This may not work for low memory machine.
#        data = data2D[:,:]
        # Read geolocation dataset from HDF-EOS2 dumper output.
#        GEO_FILE_NAME = 'lat_MOD10_L2.A2000065.0040.005.2008235221207.output'
#        lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
#        latitude = lat.reshape(data.shape)
        
#        GEO_FILE_NAME = 'lon_MOD10_L2.A2000065.0040.005.2008235221207.output'
#        lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
#        longitude = lon.reshape(data.shape)

    # Draw a polar stereographic projection using the low resolution coastline
    # database.
    m = Basemap(projection='npstere', resolution='l',
                boundinglat=64, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(60., 81., 10.))
    m.drawmeridians(np.arange(-180., 181., 30.),
                    labels=[True, False, False, True])
    # Key: 0-100=NDSI snow, 200=missing data, 201=no decision, 211=night,
    # 237=inland water, 239=ocean, 250=cloud, 254=detector saturated, 255=fill
    # Use a discretized colormap since we have only two levels.
    cmap = colors.ListedColormap(['purple', 'blue'])
    # Define the bins and normalize for discrete colorbar.
    bounds = np.array([211.0,237.0,239.0])
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
    m.pcolormesh(longitude, latitude, data, latlon=True, cmap=cmap, norm=norm)
    color_bar = plt.colorbar()

    # Must reset the alpha level to opaque for the colorbar.
    # See http://stackoverflow.com/questions/4478725/...
    # .../partially-transparent-scatter-plot-but-with-a-solid-color-bar
    color_bar.set_alpha(1)
    
    # Put label in the middle.
    color_bar.set_ticks([224.0, 238.0])
    color_bar.set_ticklabels(['night', 'inland water'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #40
def load_modis():

    dataPath = "/home/mrilee/data/MODIS/"

    # MOD03.A2005349.2120.061.2017187042837.hdf  MOD05_L2.A2005349.2120.061.2017294065852.hdf
    # MOD03.A2005349.2125.061.2017187042720.hdf  MOD05_L2.A2005349.2125.061.2017294065400.hdf

    modis_base = "MOD05_L2."

    # 1 modis_item       = "A2005349.2120.061.2017294065852"
    # 1 modis_time_start = "2005-12-15T21:20:00"

    modis_item = "A2005349.2125.061.2017294065400"
    modis_time_start = "2005-12-15T21:25:00"
    modis_geofilename = "MOD03.A2005349.2125.061.2017187042720.hdf"

    modis_suffix = ".hdf"
    modis_filename = modis_base + modis_item + modis_suffix

    fmt_suffix = ".h5"
    workFileName = "sketchG." + modis_base + modis_item + fmt_suffix

    key_across = 'Cell_Across_Swath_1km:mod05'
    key_along = 'Cell_Along_Swath_1km:mod05'

    print('loading: ', dataPath + modis_filename)
    hdf = SD(dataPath + modis_filename, SDC.READ)
    ds_wv_nir = hdf.select('Water_Vapor_Near_Infrared')
    data = ds_wv_nir.get()

    # MODIS_Swath_Type_GEO/Geolocation_Fields/
    # Latitude

    hdf_geo = SD(dataPath + modis_geofilename, SDC.READ)
    print('hg info: ', hdf_geo.info())
    hdf_geo_lat = hdf_geo.select('Latitude').get()
    hdf_geo_lon = hdf_geo.select('Longitude').get()
    print('hgl type  ', type(hdf_geo_lat))
    print('hgl shape ', hdf_geo_lat.shape, hdf_geo_lon.shape)
    print('hgl dtype ', hdf_geo_lat.dtype)

    add_offset = ds_wv_nir.attributes()['add_offset']
    scale_factor = ds_wv_nir.attributes()['scale_factor']
    print('scale_factor = %f, add_offset = %f.' % (scale_factor, add_offset))
    data = (data - add_offset) * scale_factor
    print('data mnmx: ', np.amin(data), np.amax(data))

    nAlong = ds_wv_nir.dimensions()[key_along]
    nAcross = ds_wv_nir.dimensions()[key_across]
    print('ds_wv_nir nAlong,nAcross: ', nAlong, nAcross)

    dt = np.array([modis_time_start], dtype='datetime64[ms]')
    t_resolution = 26  # 5 minutes resolution? 2+6+10+6+6
    tid = ps.from_utc(dt.astype(np.int64), t_resolution)

    fill_value = ds_wv_nir.attributes()['_FillValue']

    mod_lat = hdf_geo_lat.flatten()
    mod_lon = hdf_geo_lon.flatten()
    mod_dat = data.flatten()

    return mod_lon, mod_lat, mod_dat  # load_modis
Example #41
def run(FILE_NAME):

    DATAFIELD_NAME = 'Sea_Ice_by_Reflectance_SP'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        ncvar = nc.variables[DATAFIELD_NAME]
        data = ncvar[:].astype(np.float64)
        gridmeta = getattr(nc, 'StructMetadata.0')
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

    # Construct the grid.  The needed information is in a global attribute
    # called 'StructMetadata.0'.  Use regular expressions to tease out the
    # extents of the grid.
    ul_regex = re.compile(
        r'''UpperLeftPointMtrs=\(
                              (?P<upper_left_x>[+-]?\d+\.\d+)
                              ,
                              (?P<upper_left_y>[+-]?\d+\.\d+)
                              \)''', re.VERBOSE)
    match = ul_regex.search(gridmeta)
    x0 = float(match.group('upper_left_x'))
    y0 = float(match.group('upper_left_y'))

    lr_regex = re.compile(
        r'''LowerRightMtrs=\(
                              (?P<lower_right_x>[+-]?\d+\.\d+)
                              ,
                              (?P<lower_right_y>[+-]?\d+\.\d+)
                              \)''', re.VERBOSE)
    match = lr_regex.search(gridmeta)
    x1 = float(match.group('lower_right_x'))
    y1 = float(match.group('lower_right_y'))

    ny, nx = data.shape
    x = np.linspace(x0, x1, nx)
    y = np.linspace(y0, y1, ny)
    xv, yv = np.meshgrid(x, y)

    # Reproject into latlon
    # Reproject the coordinates out of lamaz into lat/lon.
    lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=-90 +lon_0=0 +units=m")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(lamaz, wgs84, xv, yv)

    # Use a south polar azimuthal equal area projection.
    m = Basemap(projection='splaea',
                resolution='l',
                boundinglat=-20,
                lon_0=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 0, 15), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 180, 45), labels=[0, 0, 0, 1])

    # Use a discretized colormap since we have only a few levels.
    # 0=missing data
    # 1=no decision
    # 11=night
    # 25=land
    # 37=inland water
    # 39=ocean
    # 50=cloud
    # 200=sea ice
    # 253=no input tile expected
    # 254=non-production mask
    lst = [
        '#727272', '#b7b7b7', '#ffff96', '#00ff00', '#232375', '#232375',
        '#63c6ff', '#ff0000', '#3f3f3f', '#000000'
    ]
    cmap = mpl.colors.ListedColormap(lst)
    bounds = [0, 1, 11, 25, 37, 39, 50, 200, 253, 254, 255]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)

    # Render only a subset of the mesh.
    rows = slice(500, 4000, 5)
    cols = slice(500, 4000, 5)
    m.pcolormesh(lon[rows, cols],
                 lat[rows, cols],
                 data[rows, cols],
                 latlon=True,
                 cmap=cmap,
                 norm=norm)

    color_bar = plt.colorbar()
    color_bar.set_ticks([0.5, 5.5, 18, 31, 38, 44.5, 125, 226.5, 253.5, 254.5])
    color_bar.set_ticklabels([
        'missing', 'no decision', 'night', 'land', 'inland water', 'ocean',
        'cloud', 'sea ice', 'no input tile\nexpected', 'non-production\nmask'
    ])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = DATAFIELD_NAME

    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #42
"""

import os
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
import numpy as np
from pyhdf.SD import SD, SDC

# Open HDF4 file.
FILE_NAME = 'A2002185000000.L2_LAC_SST.hdf'
hdf = SD(FILE_NAME, SDC.READ)

# Read dataset.
DATAFIELD_NAME = 'sst'
data = hdf.select(DATAFIELD_NAME)
lat = hdf.select('latitude')
latnp = lat[:, :]
lon = hdf.select('longitude')
lonnp = lon[:, :]

# Data size is sst[2724][1968] while lat/lon size is lat[2724][247].
# We need to blow up lat/lon values using control point columns parameter
# to match data size.

# Read global attributes
gattrs = hdf.attributes(full=1)
nopcp = gattrs['Number of Pixel Control Points']
cpc = hdf.select('cntl_pt_cols')
step1 = cpc[2] - cpc[1]
step2 = cpc[nopcp[0] - 1] - cpc[nopcp[0] - 2]
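
# Hedged sketch (the original example stops here): expand lat/lon from the
# control-point columns to the full pixel grid.  This uses per-row linear
# interpolation via np.interp rather than the step-based expansion the
# original may have intended; the variable names below are assumptions.
num_points = nopcp[0]                 # attribute value: control point count
cols = cpc[:].ravel()[:num_points]    # pixel column of each control point
full_cols = np.arange(cols[0], cols[-1] + 1)
latitude = np.array([np.interp(full_cols, cols, row) for row in latnp])
longitude = np.array([np.interp(full_cols, cols, row) for row in lonnp])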
Example #43
    def __init__(self, p_myd021km_filename):
        self.succeed = False
        self.filename = p_myd021km_filename

        # Initialize the image
        # Read the MODIS HDF file
        try:
            myd021km_data = SD(self.filename)
            # 1.SDS Reading
            ev_250_SDS = myd021km_data.select('EV_250_Aggr1km_RefSB')  # 250 m bands aggregated to 1 km
            ev_500_SDS = myd021km_data.select('EV_500_Aggr1km_RefSB')  # 500 m bands aggregated to 1 km
            ev_1kmRef_SDS = myd021km_data.select('EV_1KM_RefSB')  # 1 km reflective bands
            ev_1km_SDS = myd021km_data.select('EV_1KM_Emissive')  # 1 km emissive bands
            '''250'''
            ev_250 = ev_250_SDS.get()
            for key, value in ev_250_SDS.attributes().items():
                if key == 'reflectance_offsets':
                    ev_250_offset = value
                if key == 'reflectance_scales':
                    ev_250_scale = value
            '''500'''
            ev_500 = ev_500_SDS.get()
            for key, value in ev_500_SDS.attributes().items():
                if key == 'reflectance_offsets':
                    ev_500_offset = value
                if key == 'reflectance_scales':
                    ev_500_scale = value
            '''1km RefSB'''
            ev_1kmRef = ev_1kmRef_SDS.get()
            for key, value in ev_1kmRef_SDS.attributes().items():
                if key == 'radiance_offsets':
                    ev_1kmRef_offset = value
                if key == 'radiance_scales':
                    ev_1kmRef_scale = value
            '''1km Emissive'''
            ev_1km = ev_1km_SDS.get()
            for key, value in ev_1km_SDS.attributes().items():
                if key == 'radiance_offsets':
                    ev_1km_offset = value
                if key == 'radiance_scales':
                    ev_1km_scale = value

            # pack
            self.band_num = (ev_250.shape[0] + ev_500.shape[0]
                             + ev_1kmRef.shape[0] + ev_1km.shape[0])
            self.width = ev_250.shape[1]
            self.height = ev_250.shape[2]
            self.all_band = np.empty([self.band_num, self.width, self.height],
                                     dtype=np.uint16)
            self.scale = np.empty(self.band_num)
            self.offset = np.empty(self.band_num)
            now_band = 0
            # 250
            for idx in range(ev_250.shape[0]):
                self.all_band[now_band, ...] = ev_250[idx, ...]
                self.scale[now_band] = ev_250_scale[idx]
                self.offset[now_band] = ev_250_offset[idx]
                now_band += 1
            # print(now_band)
            # 500
            for idx in range(ev_500.shape[0]):
                self.all_band[now_band, ...] = ev_500[idx, ...]
                self.scale[now_band] = ev_500_scale[idx]
                self.offset[now_band] = ev_500_offset[idx]
                now_band += 1
            # print(now_band)
            # 1kmref
            for idx in range(ev_1kmRef.shape[0]):
                self.all_band[now_band, ...] = ev_1kmRef[idx, ...]
                self.scale[now_band] = ev_1kmRef_scale[idx]
                self.offset[now_band] = ev_1kmRef_offset[idx]
                now_band += 1
            # print(now_band)
            # 1km
            for idx in range(ev_1km.shape[0]):
                self.all_band[now_band, ...] = ev_1km[idx, ...]
                self.scale[now_band] = ev_1km_scale[idx]
                self.offset[now_band] = ev_1km_offset[idx]
                now_band += 1
            # print(now_band)
            self.succeed = True
        except Exception:
            print('MODIS read error (myd021km): ' + self.filename)
            print('Unexpected error:', sys.exc_info()[0])
            self.succeed = False
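
# Hedged usage sketch (not part of the original class): convert a stored band
# back to physical units with MODIS's scale * (counts - offset); `reader`
# stands for an instance of the class above.
#
#   band = 5
#   physical = (reader.all_band[band].astype(np.float32)
#               - reader.offset[band]) * reader.scale[band]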
Example #44
        481410057:{ 'name': 'Socorro Hueco',        'lat': 31.6675000, 'lon': -106.2880000 },
        481410058:{ 'name': 'Skyline Park',         'lat': 31.8939133, 'lon': -106.4258270 },
        481410693:{ 'name': 'VanBuren',             'lat': 31.8133700, 'lon': -106.4645200 },
        481411011:{ 'name': 'El Paso Delta',        'lat': 31.7584760, 'lon': -106.4073460 },
        481411021:{ 'name': 'Ojo De Agua',          'lat': 31.8624700, 'lon': -106.5473000 },
        }

    base_dir = './hdf/20180101-20191101/'
    flist = sorted(os.listdir(base_dir))
    features = []
    # do this for each file
    for fname in flist:

        f = SD(base_dir + fname, SDC.READ)
        ### select sds obj
        lat_obj = f.select('Latitude')
        lon_obj = f.select('Longitude')
        lst_obj = f.select('LST')
        vt_obj  = f.select('View_time')

        ### get sds data
        lat_data = lat_obj.get()
        lon_data = lon_obj.get()
        lst_data = lst_obj.get()
        vt_data  = vt_obj.get()

        # filter HDF points by location bound
        inbound = []
        year, day, hour = infoFromFilename(fname)
        for i in range(np.shape(lat_data)[0]):
            for j in range(np.shape(lat_data)[1]):
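                # Hedged sketch: the original loop body is truncated here.
                # One plausible completion keeps pixels inside a lat/lon
                # bounding box; the bounds are assumptions (roughly covering
                # the El Paso stations above) and would normally be hoisted
                # out of the loop.
                latmin, latmax = 31.5, 32.0
                lonmin, lonmax = -106.7, -106.2
                if (latmin <= lat_data[i, j] <= latmax
                        and lonmin <= lon_data[i, j] <= lonmax):
                    inbound.append((i, j))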
Example #45
print(swath_info)

# %% [markdown]
# * Read the projection details and data

# %%
myd03_file_name = list(a301_lib.sat_data.glob("MYD03*2105*hdf"))[0]
print(myd03_file_name)
myd02_file_name = list(a301_lib.sat_data.glob("MYD02*2105*hdf"))[0]
print(myd02_file_name)

# %%
# Read the lats and lons from the MYD03 file
print(f"reading {myd03_file_name}")
m3_file = SD(str(myd03_file_name), SDC.READ)
lats = m3_file.select("Latitude").get()
lons = m3_file.select("Longitude").get()
m3_file.end()

# %%
the_file = SD(str(myd02_file_name), SDC.READ)
longwave_data = the_file.select("EV_1KM_Emissive")
longwave_bands = the_file.select("Band_1KM_Emissive")
band_nums = longwave_bands.get()
ch30_index = np.searchsorted(band_nums, 30.0)
print(f"make sure our index datatime in int64: {ch30_index.dtype}")
ch30_index = int(ch30_index)
scales = longwave_data.attributes()["radiance_scales"]
offsets = longwave_data.attributes()["radiance_offsets"]
ch30_scale = scales[ch30_index]
ch30_offset = offsets[ch30_index]
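
# %%
# Hedged sketch of the next step (not from the original): apply the
# calibration to get channel 30 radiance.  The EV counts are uint16, so cast
# before scaling; MODIS radiance is scale * (counts - offset).
ch30_counts = longwave_data.get()[ch30_index, :, :]
ch30_radiance = (ch30_counts.astype(np.float32) - ch30_offset) * ch30_scale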
Example #46
modz = []
modlat = []
modlon = []

plt.figure()
for i in range(len(filelist)):
    data = SD(filelist[i], SDC.READ)
    time = filelist[i][filelist[i].find('MYD021KM.A') +
                       18:filelist[i].find('MYD021KM.A') + 22]

    for j in range(len(coorlist)):
        if coorlist[j].find(time) > 0:
            coor = SD(coorlist[j], SDC.READ)

    selected_sds = data.select('EV_250_Aggr1km_RefSB')
    selected_sds_attributes = selected_sds.attributes()

    for key, value in selected_sds_attributes.items():
        if key == 'reflectance_scales':
            reflectance_scales_250_Aggr1km_RefSB = np.asarray(value)
        if key == 'reflectance_offsets':
            reflectance_offsets_250_Aggr1km_RefSB = np.asarray(value)

    sds_data_250_Aggr1km_RefSB = selected_sds.get()

    selected_sds = data.select('EV_500_Aggr1km_RefSB')
    selected_sds_attributes = selected_sds.attributes()

    for key, value in selected_sds_attributes.items():
        if key == 'reflectance_scales':
Example #47
def run(FILE_NAME, OUT_PATH):
    # Identify the data field.
    DATAFIELD_NAME = 'Feature_Classification_Flags'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)

        # Subset the data to match the size of the swath geolocation fields.
        # Turn off autoscaling, we'll handle that ourselves due to presence of
        # a valid range.
        var = nc.variables[DATAFIELD_NAME]
        data = var[:, :]
        print(data.shape)

        # Read geolocation datasets.
        lat = nc.variables['Latitude'][:]
        long = nc.variables['Longitude'][:]

        lat = np.squeeze(lat)
        long = np.squeeze(long)
        print(lat.shape)
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :]
        print(data.shape)

        # Read geolocation datasets.
        latitude = hdf.select('Latitude')
        lat = latitude[:]

        longitude = hdf.select('Longitude')
        long = longitude[:]

    # Extract Feature Type only (1-3 bits) through bitmask.
    data_feature_type = data & 7

    # Bits 10-12 hold the sub-feature type; it could be extracted the same
    # way after a shift:
    #   data_sub_feature_type = (data >> 9) & 7
    # To focus on a single class (e.g. aerosol = 3), the mask could be
    # thresholded in place before plotting.

    print(data_feature_type.shape)

    # Subset latitude values for the region of interest (40N to 62N).
    # See the output of CAL_LID_L2_VFM-ValStage1-V3-02.2011-12-31T23-18-11ZD.hdf.py example.
    lat = lat[:]
    long = long[:]
    size = lat.shape[0]

    print(lat.shape[0])

    # You can visualize other blocks by changing the subset parameters:
    #   data2d = data[3500:3999, 0:164]    # 20.2 km to 30.1 km
    #   data2d = data[3500:3999, 165:1164] #  8.2 km to 20.2 km
    #   data2d = data[3500:4000, 1165:]    # -0.5 km to  8.2 km

    low_data2d = data_feature_type[:, 1165:1455]  # -0.5 km to  8.2 km
    med_data2d2 = data_feature_type[:, 165:365]   #  8.2 km to 20.2 km
    # high_data2d2 = data_feature_type[:, 0:55]   # 20.2 km to 30.2 km

    # Stack the altitude ranges side by side (the high range is omitted).
    data2d = np.concatenate((med_data2d2, low_data2d), axis=1)

    print(data2d.shape)

    # Other analyses could reshape data2d into (size, 15, 290) blocks, or
    # focus on a single class such as cloud (=2) or aerosol (=3).

    # Generate altitude data according to the file specification [1].
    alt = np.zeros(490)

    # -0.5 km to 8.2 km: 290 bins at 30 m.
    for i in range(0, 290):
        alt[i] = -0.5 + i * 0.03

    # 8.2 km to 20.2 km: 200 bins at 60 m.
    for i in range(290, 490):
        alt[i] = 8.2 + (i - 289) * 0.06

    # A third range (20.2 km to 30.1 km at 180 m) is omitted here:
    #   alt[i] = 20.2 + (i - 490) * 0.18

    # Contour the data on a grid of latitude vs. altitude.
    latitude, altitude = np.meshgrid(lat, alt)

    # Make a color map of fixed colors.

    cmap = colors.ListedColormap(
        ['gray', 'blue', 'yellow', 'red', 'green', 'brown', 'black', 'white'])
    # cmap = colors.ListedColormap(['white', 'red', 'orange','magenta','green','blue','gray', ])

    # Define the bins and normalize.

    bounds = np.linspace(0, 8, num=9)
    levels = [0, .99, 1.99, 2.99, 3.99, 4.99, 5.99, 6.99, 7.99]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)

    long_name = 'Feature Type (Bits 1-3) at Altitude 0 - 20.2km'
    basename = os.path.basename(FILE_NAME)
    plt.contourf(latitude,
                 altitude,
                 np.rot90(data2d, 1),
                 cmap=cmap,
                 levels=levels)

    plt.title('{0}\n{1}\n\n\n\n'.format(basename, long_name), fontsize=10)
    plt.xlabel('Latitude (degrees)', fontsize=8)
    plt.ylabel('Altitude (km)', fontsize=8)

    plt.xticks(fontsize="10")
    plt.yticks(fontsize="10")

    # Pin eight tick positions so they line up with the eight longitude
    # labels placed on the twin axis below.
    ax = plt.gca()
    ax.set_xticks(np.linspace(lat.min(), lat.max(), 8))

    #plt.subplots_adjust(right=0.87)

    axtwo = plt.twiny()

    axtwo.set_xlabel("Longitude (degrees)", fontsize=8)

    #axtwo.xaxis.set_label_coords(.5, 1.01)

    intlong = long.astype(int).flatten()

    longspace = int(math.floor((len(long[:]) / 7)))

    print "long space"
    print longspace
    print len(long[:])
    intlong_reversed_arr = intlong[::-1]

    axtwo.set_xlim(0, 7)
    axtwo.set_xticks([0, 1, 2, 3, 4, 5, 6, 7])
    axtwo.set_xticklabels(intlong_reversed_arr[::longspace], fontsize=10)

    print(intlong_reversed_arr[::longspace])

    fig = plt.gcf()

    fig.set_figheight(4)
    fig.set_figwidth(6)

    plt.subplots_adjust(right=0.65)

    fig.tight_layout()

    # Create a second axes for the discrete colorbar.
    ax2 = fig.add_axes([0.88, 0.1, 0.01, 0.6])

    cb = mpl.colorbar.ColorbarBase(ax2, cmap=cmap, boundaries=bounds)
    cb.ax.set_yticklabels([
        'invalid', 'clear air', 'cloud', 'aerosol', 'stratospheric\nfeature',
        'surface', 'subsurface', 'no signal'
    ],
                          fontsize=6)

    # plt.show()
    pngfile = OUT_PATH + '/' + "{0}.v.py_feature_type_0-20.2km.png".format(
        basename)
    fig.savefig(pngfile)
    fig.clf()
Example #48
import datetime
starttime = datetime.datetime.now()
# time the long-running processing below from here


# Read the file
HDF_FILE_URL = "1011-1.hdf"
file = SD(HDF_FILE_URL)
# print(file.info())
datasets_dic = file.datasets()
'''
for idx, sds in enumerate(datasets_dic.keys()):
    print(idx, sds)
'''
sds_obj1 = file.select('Longitude')  # select sds
sds_obj2 = file.select('Latitude')  # select sds
sds_obj3 = file.select('Optical_Depth_Land_And_Ocean')  # select sds
longitude = sds_obj1.get()  # get sds data
latitude = sds_obj2.get()  # get sds data
aod = sds_obj3.get()  # get sds data

# Parameter setup
longitude = pd.DataFrame(longitude)
latitude = pd.DataFrame(latitude)
aod = pd.DataFrame(aod)
# print(aod.shape, aod.info())  # matrix size
'''
# print(aod[450][650])  # column index first, then row
'''
# Monitoring station coordinates
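
# Hedged sketch of the next step (the original is truncated here): find the
# AOD pixel nearest one station using a crude squared-degree distance.  The
# station coordinates below are assumptions, not from the original.
import numpy as np

station_lat, station_lon = 31.6675, -106.2880
dist2 = ((latitude.values - station_lat) ** 2
         + (longitude.values - station_lon) ** 2)
i, j = np.unravel_index(np.argmin(dist2), dist2.shape)
station_aod = aod.values[i, j]
print('nearest-pixel AOD:', station_aod)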