Example #1
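The run() examples below assume a shared preamble along these lines (a sketch; the exact imports and the USE_GDAL / USE_NETCDF4 switches vary per example):

import os
import re

import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pyproj
from mpl_toolkits.basemap import Basemap
from pyhdf.SD import SD, SDC

USE_GDAL = False     # choose the GDAL or pyhdf branch in the grid example
USE_NETCDF4 = False  # choose the netCDF4 or pyhdf branch where offered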
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Ice Particle Diameter'

    hdf = SD(FILE_NAME, SDC.READ)

    # Read the dataset.
    #
    # The file has many 'Ice Particle Diameter' datasets under
    # different Vgroups, so the name alone is ambiguous.
    #
    # Use HDFView to look up the reference number.
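    # Alternatively (a sketch, not in the original example), the candidate
    # SDSs and their reference numbers can be listed with pyhdf itself:
    #
    #   for name, (dims, shape, dtype, idx) in hdf.datasets().items():
    #       if name == DATAFIELD_NAME:
    #           print(idx, hdf.select(idx).ref(), shape)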
    index = hdf.reftoindex(38)
    data4D = hdf.select(index)
    data = data4D[0, :, :, 0].astype(np.float64)

    # Read attributes.
    attrs = data4D.attributes(full=1)
    fva = attrs["_FillValue"]
    fillvalue = fva[0]
    ua = attrs["units"]
    units = ua[0]

    # Apply the fill value.
    data[data == fillvalue] = np.nan
    datam = np.ma.masked_array(data, mask=np.isnan(data))

    # The lat and lon should be calculated following [1].
    lat = np.linspace(89.5, -89.5, 180)

    # Build the Monthly Hourly Averages axis: eight 3-hour intervals (8 * 3 hr = 24 hr) [2].
    MHA = np.linspace(1, 8, 8)

    plt.contourf(MHA, lat, datam.T)
    plt.ylabel('Latitude (degrees_north)')
    plt.xlabel('Monthly 3-hourly')
    plt.xticks(MHA, [
        '00-03 GMT', '03-06 GMT', '06-09 GMT', '09-12 GMT', '12-15 GMT',
        '15-18 GMT', '18-21 GMT', '21-24 GMT'
    ],
               fontsize='8')
    cb = plt.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)

    # I guess Stats=0 means "mean" and Stats=1 means "standard
    # deviation." See [3] and page 154 of [4].
    plt.title('{0}\n{1}'.format(
        basename,
        '/1.0 Degree Zonal/Monthly Hourly Averages/Cloud Layer High/\n Ice Particle Diameter (Mean)'
    ),
              fontsize=11)
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
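Each snippet defines run() without a driver; a minimal harness (the default file name here is hypothetical) looks like:

if __name__ == '__main__':
    import sys
    # Pass the real HDF4 file path on the command line.
    fname = sys.argv[1] if len(sys.argv) > 1 else 'sample.hdf'
    run(fname)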
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Ice Particle Diameter'

    hdf = SD(FILE_NAME, SDC.READ)

    # Read the dataset.
    #
    # The file has many 'Ice Particle Diameter' datasets under
    # different Vgroups, so the name alone is ambiguous.
    #
    # Use HDFView to look up the reference number.
    index = hdf.reftoindex(38)
    data4D = hdf.select(index)
    data = data4D[0, 3, :, 0].astype(np.float64)  # time index 3 is the 09-12 GMT bin

    # Read attributes.
    attrs = data4D.attributes(full=1)
    fva = attrs["_FillValue"]
    fillvalue = fva[0]
    ua = attrs["units"]
    units = ua[0]

    # Apply the fill value.
    data[data == fillvalue] = np.nan
    datam = np.ma.masked_array(data, mask=np.isnan(data))

    # The lat and lon should be calculated following [1].
    lat = np.linspace(89.5, -89.5, 180)

    plt.plot(lat, datam)
    plt.xlabel('Latitude (degrees_north)')
    plt.ylabel('{0} ({1})'.format(DATAFIELD_NAME, units))

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(
        basename,
        '/1.0 Degree Zonal/Monthly Hourly Averages/Cloud Layer High/\n Ice Particle Diameter (Mean; 09-12 GMT)'
    ),
              fontsize=11)
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.line.png".format(basename)
    fig.savefig(pngfile)
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Extent'

    if USE_GDAL:
        import gdal
        GRID_NAME = 'Southern Hemisphere'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(
            FILE_NAME, GRID_NAME, DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray()

        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        del gdset

    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset. Dataset name 'Extent' exists under different groups.
        # Use reference number to resolve ambiguity.
        data2D = hdf.select(hdf.reftoindex(12))
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

        # Construct the grid.  The needed information is in a global attribute
        # called 'StructMetadata.0'.  Use regular expressions to tease out the
        # extents of the grid.
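        # The metadata text contains lines of the form (values illustrative):
        #   UpperLeftPointMtrs=(-9024309.000000,9024309.000000)
        #   LowerRightMtrs=(9024309.000000,-9024309.000000)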
        ul_regex = re.compile(
            r'''UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group('upper_left_x'))
        y0 = float(match.group('upper_left_y'))

        lr_regex = re.compile(
            r'''LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group('lower_right_x'))
        y1 = float(match.group('lower_right_y'))
        ny, nx = data.shape
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    # Reproject into WGS84
    lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=-90 +lon_0=0 +units=m")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(lamaz, wgs84, xv, yv)
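    # Note (not part of the original example): pyproj 2+ deprecates both
    # pyproj.transform() and the "+init=" syntax; the equivalent modern call
    # would be
    #
    #   transformer = pyproj.Transformer.from_crs(
    #       "+proj=laea +a=6371228 +lat_0=-90 +lon_0=0 +units=m",
    #       "EPSG:4326", always_xy=True)
    #   lon, lat = transformer.transform(xv, yv)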

    # Use a south polar azimuthal equal area projection.
    m = Basemap(projection='splaea', resolution='l', boundinglat=-60, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 0, 15), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 180, 30), labels=[0, 0, 0, 1])

    # Bin the data as follows:
    #   0       -- snow-free land
    #   1-100%  -- sea ice, in 20% bins shaded from blue to cyan
    #   101     -- permanent ice
    #   103     -- dry snow
    #   252     -- mixed pixels at coastlines
    #   255     -- ocean
    lst = [
        '#004400', '#0000ff', '#0044ff', '#0088ff', '#00ccff', '#00ffff',
        '#ffffff', '#440044', '#191919', '#000000', '#8888cc'
    ]
    cmap = mpl.colors.ListedColormap(lst)
    bounds = [0, 1, 21, 41, 61, 81, 101, 103, 104, 252, 255]
    tickpts = [0.5, 11, 31, 51, 71, 91, 102, 103.5, 178, 253.5]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)

    # The corner cells fall outside the valid projection domain, so the
    # inverse transform yields bogus lon/lat there; chop them out.
    idx = slice(5, 721)
    m.pcolormesh(lon[idx, idx],
                 lat[idx, idx],
                 data[idx, idx],
                 latlon=True,
                 cmap=cmap,
                 norm=norm)
    color_bar = plt.colorbar()
    color_bar.set_ticks(tickpts)
    color_bar.set_ticklabels([
        'snow-free\nland', '1-20% sea ice', '21-40% sea ice', '41-60% sea ice',
        '61-80% sea ice', '81-100% sea ice', 'permanent\nice', 'dry\nsnow',
        'mixed pixels\nat coastlines', 'ocean'
    ])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = DATAFIELD_NAME
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.1.py.png".format(basename)
    fig.savefig(pngfile)
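Basemap is no longer maintained; for reference, roughly the same south-polar plot with cartopy (a sketch reusing the lon/lat/data/cmap/norm/idx names above, not part of the original example):

import cartopy.crs as ccrs
import matplotlib.pyplot as plt

ax = plt.axes(projection=ccrs.LambertAzimuthalEqualArea(central_latitude=-90))
ax.set_extent([-180, 180, -90, -60], ccrs.PlateCarree())
ax.coastlines(linewidth=0.5)
ax.gridlines()
ax.pcolormesh(lon[idx, idx], lat[idx, idx], data[idx, idx],
              transform=ccrs.PlateCarree(), cmap=cmap, norm=norm)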
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Net radiant flux'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
    
        var = nc.variables[DATAFIELD_NAME]
        data = var[:].astype(np.float64)
        latitude = nc.variables['Colatitude'][:]
        longitude = nc.variables['Longitude'][:]

        # Read attributes.
        units = var.units
        fillvalue = var._FillValue
        long_name = var.long_name

    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read the dataset.
        #
        # The file has many 'Net radiant flux' datasets under different
        # Vgroups, so the name alone is ambiguous.
        #
        # Use HDFView to look up the reference number.
        index = hdf.reftoindex(141)
        data1D = hdf.select(index)

        data = data1D[:].astype(np.double)

        # Read geolocation datasets.
        lat = hdf.select(hdf.reftoindex(185))
        latitude = lat[:]

        lon = hdf.select(hdf.reftoindex(184))
        longitude = lon[:]


        # Read attributes.
        attrs = data1D.attributes(full=1)
        ua = attrs["units"]
        units = ua[0]
        fva = attrs["_FillValue"]
        fillvalue = fva[0]
        lna = attrs["long_name"]
        long_name = lna[0]

    # Apply the fill value attribute.
    data[data == fillvalue] = np.nan
    data = np.ma.masked_array(data, np.isnan(data))

    # The file stores colatitude (0 degrees at the north pole), so convert to latitude.
    latitude = 90 - latitude
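    # e.g. colatitude 0 -> +90 (north pole), colatitude 180 -> -90 (south pole)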

    plt.plot(latitude, data)
    plt.xlabel('Latitude (degrees_north)')
    plt.ylabel('{0} ({1})'.format(DATAFIELD_NAME, units))

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}\n{2} at Longitude={3} (degrees_east)'.format(
        basename, long_name, DATAFIELD_NAME, longitude[0]),
              fontsize=11)
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #8
def run(FILE_NAME):

    DATAFIELD_NAME = '89.0V_Res.5B_TB_(not-resampled)'

    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)

        data = nc.variables[DATAFIELD_NAME][:].astype(np.float64)
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
    
        # Replace the fill value with NaN and wrap the result in a masked
        # array.  Apply the scaling equation.  These attributes are named in
        # a very non-standard manner ('SCALE FACTOR' even contains a space,
        # hence getattr below).
        scale_factor = getattr(nc.variables[DATAFIELD_NAME], 'SCALE FACTOR')
        add_offset = nc.variables[DATAFIELD_NAME].OFFSET

    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read the dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read the geolocation datasets.  This product has multiple
        # 'Latitude'/'Longitude' pairs under different groups, so use
        # reference numbers (looked up with HDFView) to disambiguate.
        lat = hdf.select(hdf.reftoindex(192))
        latitude = lat[:, :]
        lon = hdf.select(hdf.reftoindex(194))
        longitude = lon[:, :]

        # Retrieve attributes.
        attrs = data2D.attributes(full=1)
        sfa = attrs["SCALE FACTOR"]
        scale_factor = sfa[0]
        aoa = attrs["OFFSET"]
        add_offset = aoa[0]
        
    # Apply the fill value (hard-coded for this product).
    data[data == -32768] = np.nan
    data = data * scale_factor + add_offset
    datam = np.ma.masked_array(data, np.isnan(data))
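    # pcolormesh leaves masked cells blank, so fill areas drop out of the maps.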

    units = "degrees K"
    long_name = DATAFIELD_NAME

    # The swath starts near the south pole but also extends over the north
    # pole, so an equidistant cylindrical projection alone is a poor choice.
    # Show the full global map plus a north polar map.
    fig = plt.figure(figsize=(15, 6))
    ax1 = plt.subplot(1, 2, 1)
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat=90,
                llcrnrlon=-180, urcrnrlon=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 91, 30), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 181., 45), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, datam, latlon=True)

    ax2 = plt.subplot(1, 2, 2)
    m = Basemap(projection='npstere', resolution='l',
                boundinglat=65, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(60, 81, 10), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180., 181., 30.), labels=[1, 0, 0, 1])
    m.pcolormesh(longitude, latitude, datam, latlon=True)

    # Give the two panels one shared colorbar in its own axes.
    cax = plt.axes([0.92, 0.1, 0.03, 0.8])
    cb = plt.colorbar(cax=cax)
    cb.set_label(units)
    

    basename = os.path.basename(FILE_NAME)
    fig = plt.gcf()
    fig.suptitle('{0}\n{1}'.format(basename, long_name))
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
Example #9
    def hdfget(self, id=False, section=False, group=False):
        """Scan each HDF4 file in self.dirLst for the vgroup named `section`
        and collect any Vdata named `group` into self.HDF under keys
        prefixed with `id`."""

        # Slice self.dirLst (e.g. self.dirLst[0:500]) to limit a test run.
        for drs in self.dirLst:

            hdf = HDF(drs, HC.READ)
            sd = SD(drs)

            # Open the vdata (VS) and vgroup (V) interfaces.
            vs = hdf.vstart()
            v = hdf.vgstart()

            # Scan all vgroups in the file by walking the ref chain.
            ref = -1
            while True:
                try:
                    ref = v.getid(ref)

                    vg = v.attach(ref)

                    if vg._name == section:

                        # Read the contents of the vgroup: tagrefs() returns
                        # a (tag, ref) pair for each member.
                        members = vg.tagrefs()

                        # Examine each member by its (tag, ref) pair.
                        for tag, ref2 in members:
                            # Vdata tag
                            if tag == HC.DFTAG_VH:
                                vd = vs.attach(ref2)
                                nrecs, intmode, fields, size, name = vd.inquire()

                                if vd._name == group:

                                    self.HDF.setdefault(id + '_fields',
                                                        []).append(fields)
                                    self.HDF.setdefault(id + '_nrecs',
                                                        []).append(nrecs)
                                    self.HDF.setdefault(id + '_data',
                                                        []).append(
                                                            np.asarray(vd[:]))

                                vd.detach()

                            # SDS tag
                            elif tag == HC.DFTAG_NDG:
                                sds = sd.select(sd.reftoindex(ref2))
                                name, rank, dims, dtype, nattrs = sds.info()
                                sds.endaccess()

                            # Nested vgroup tag
                            elif tag == HC.DFTAG_VG:
                                vg0 = v.attach(ref2)
                                vg0.detach()

                            else:
                                print("unhandled tag,ref", tag, ref2)

                    vg.detach()

                except HDF4Error as msg:  # no more vgroups
                    break

            # Release the interfaces before moving on to the next file.
            v.end()
            vs.end()
            sd.end()
            hdf.close()
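Example #9 assumes a host class supplying dirLst and an accumulator dict named HDF; a minimal, hypothetical harness (class, file, and field names are made up) might be:

import numpy as np
from pyhdf.HDF import HDF, HC, HDF4Error
from pyhdf.SD import SD
from pyhdf.V import *   # vgroup interface, as in the classic pyhdf examples
from pyhdf.VS import *  # vdata interface

class Scanner:
    def __init__(self, dirLst):
        self.dirLst = dirLst  # HDF4 file paths to scan
        self.HDF = {}         # accumulates <id>_fields/_nrecs/_data lists

    # hdfget() as defined above would live here.

scanner = Scanner(['sample.hdf'])
# scanner.hdfget(id='t', section='Data Fields', group='Temperature')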