def cimport_datat(self, idata_name=('TS', )):
        '''Import ten-year ambient temperature data.

        Args:
            idata_name: name of the SDS to read.
        Returns:
            self.dict_temperature: the imported data.
        '''
        year_index = range(self.start_year, self.end_year + 1)
        site_num = len(self.site_indext)
        zero = str(0)
        for each_siteth in range(1, site_num + 1):
            self.dict_temperature[each_siteth] = {}
            for each_year in year_index:
                self.dict_temperature[each_siteth][each_year] = {}
                for each_month in SolarData.month_name:
                    self.dict_temperature[each_siteth][each_year][
                        each_month] = np.empty((0, 24), np.float32)
        for each_year in year_index:
            if ((each_year % 400 == 0)
                    or ((each_year % 4 == 0) and (each_year % 100 != 0))):
                year_feature = SolarData.leap_year
            else:
                year_feature = SolarData.nonleap_year
            for each_month in SolarData.month_name:
                if ((each_year == 2010)
                        and (each_month in SolarData.spl_month2010)):
                    fnames = self.file_patht + SolarData.fnameta
                else:
                    fnames = self.file_patht + SolarData.fnametb
                day_s = year_feature[each_month][1]
                day_e = year_feature[each_month][2]
                if year_feature[each_month][3]:
                    for each_day in range(day_s, day_e + 1):
                        fname = fnames + str(each_year) + zero + str(
                            each_day) + SolarData.fnamee
                        print(fname)
                        fid = SD(fname)
                        tmpirrad = fid.select(idata_name[0])[:, :, :]
                        fid.end()
                        for each_siteth in range(1, site_num + 1):
                            tmpirrad_site = tmpirrad[
                                :, self.site_indext[each_siteth - 1][0],
                                self.site_indext[each_siteth - 1][1]].reshape(1, -1)
                            self.dict_temperature[each_siteth][each_year][each_month] = np.vstack(
                                (self.dict_temperature[each_siteth][each_year][each_month],
                                 tmpirrad_site))
                        # print(tmpirrad_site)
                else:
                    for each_day in range(day_s, day_e + 1):
                        fname = fnames + str(each_year) + str(
                            each_day) + SolarData.fnamee
                        print(fname)
                        fid = SD(fname)
                        tmpirrad = fid.select(idata_name[0])[:, :, :]
                        fid.end()
                        for each_siteth in range(1, site_num + 1):
                            tmpirrad_site = tmpirrad[
                                :, self.site_indext[each_siteth - 1][0],
                                self.site_indext[each_siteth - 1][1]].reshape(1, -1)
                            self.dict_temperature[each_siteth][each_year][each_month] = np.vstack(
                                (self.dict_temperature[each_siteth][each_year][each_month],
                                 tmpirrad_site))
                        # print(tmpirrad_site)
        return self.dict_temperature
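The leap-year test above hand-codes the Gregorian rule; as a sanity check, the standard library's calendar.isleap encodes the same predicate. A minimal, self-contained comparison:

import calendar

for year in (1900, 2000, 2004, 2010):
    assert calendar.isleap(year) == ((year % 400 == 0) or
                                     (year % 4 == 0 and year % 100 != 0))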
Example #2
def write_interpolated(filename, f0, f1, fact, datasets):
    '''
    interpolate two hdf files f0 and f1 using factor fact, and
    write the result to filename
    '''

    hdf = SD(filename, SDC.WRITE|SDC.CREATE)
    for dataset in datasets:

        try:
            info = SD(f0).select(dataset).info()
        except Exception:
            print('Error loading %s in %s' % (dataset, f0), file=stderr)
            raise

        typ  = info[3]
        shp  = info[2]
        met0 = SD(f0).select(dataset).get()
        met1 = SD(f1).select(dataset).get()

        interp = (1-fact)*met0 + fact*met1

        interp = interp.astype({
                SDC.INT16: 'int16',
                SDC.FLOAT32: 'float32',
                SDC.FLOAT64: 'float64',
            }[typ])

        # write
        sds = hdf.create(dataset, typ, shp)
        sds[:] = interp[:]
        sds.endaccess()

    hdf.end()
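A usage sketch for write_interpolated; the file names and SDS names here are hypothetical, and both input files are assumed to contain every listed dataset. fact=0.5 writes the half-way blend:

# hypothetical inputs: two ancillary met files bracketing the target time
write_interpolated('met_interp.hdf', 'met0.hdf', 'met1.hdf', 0.5, ['z_wind', 'm_wind'])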
Example #3
def main():
    # Group all the files in the path
    list_files = agrupate_files(path)
    # For each group of files, a new HDF file is created
    for files in list_files:
        # Create the reader objects used to get the data
        objs = read_files(files)
        # Get the data: three rectangular numpy arrays (lat, lon and fire_mask)
        lat = np.array(objs[0].get())
        lon = np.array(objs[1].get())
        fire_mask = np.array(objs[2].get())
        # Creation of a data tuple
        data = (lat, lon, fire_mask)
        # Build the new file name from the input file's name
        m = files[0].split('/')
        mm = m[-1].split('.')
        nfile = mm[0][0:3] + 'NN.' + '.'.join(mm[1:len(mm)])
        # Creation of the new HDF file
        nhdf = SD(nfile, SDC.WRITE | SDC.CREATE)
        print('Creating:', nfile)
        # Write the data into the new HDF file
        write_file(nhdf, 'Latitude', data[0], SDC.FLOAT32)
        write_file(nhdf, 'Longitude', data[1], SDC.FLOAT32)
        write_file(nhdf, 'FireMask', data[2], SDC.INT32)
        nhdf.end()
Example #4
def read_var_point(filename,var_name,i,j,k,thetac,phic):
    thetac = thetac[j]
    phic   = phic[k]


    hdffile = SD(filename,SDC.READ)
    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi']:
        var=hdffile.select(var_name+'_').get(start=(k,j,i),count=(1,1,1)).squeeze()
    else:
#        R,theta,phi=r_theta_phi_uniform(filename)

        if var_name in ['br','btheta','bphi']:
            bx=hdffile.select('bx_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            by=hdffile.select('by_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            bz=hdffile.select('bz_').get(start=(k,j,i),count=(1,1,1)).squeeze()

            if var_name=='br':
                var     = bx*cos(phic)*sin(thetac) + by*sin(phic)*sin(thetac) + bz*cos(thetac)
            elif var_name=='btheta':
                var = bx*cos(phic)*cos(thetac) + by*sin(phic)*cos(thetac) - bz*sin(thetac)
            else:
                var   =-bx*sin(phic)            + by*cos(phic)
        else:
            vx=hdffile.select('vx_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            vy=hdffile.select('vy_').get(start=(k,j,i),count=(1,1,1)).squeeze()
            vz=hdffile.select('vz_').get(start=(k,j,i),count=(1,1,1)).squeeze()

            if var_name=='vr':
                var    = vx*cos(phic)*sin(thetac) + vy*sin(phic)*sin(thetac) + vz*cos(thetac)
            elif var_name=='vtheta':
                var = vx*cos(phic)*cos(thetac) + vy*sin(phic)*cos(thetac) - vz*sin(thetac)
            else:
                var   =-vx*sin(phic)            + vy*cos(phic)
    hdffile.end()
    return(var)
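The Cartesian-to-spherical conversion above is a rotation into the orthonormal (r, theta, phi) basis, so it preserves the vector magnitude. A quick self-contained check of that identity:

import numpy as np

bx, by, bz = 0.3, -1.2, 0.7
thetac, phic = 1.0, 2.5
br = bx*np.cos(phic)*np.sin(thetac) + by*np.sin(phic)*np.sin(thetac) + bz*np.cos(thetac)
bt = bx*np.cos(phic)*np.cos(thetac) + by*np.sin(phic)*np.cos(thetac) - bz*np.sin(thetac)
bp = -bx*np.sin(phic) + by*np.cos(phic)
assert np.isclose(br**2 + bt**2 + bp**2, bx**2 + by**2 + bz**2)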
Example #5
def MOD03_read(mod03_file):

    h4f = SD(mod03_file)
    sds = h4f.select('Latitude')
    lat = sds.get()
    sds.endaccess()

    sds = h4f.select('Longitude')
    lon = sds.get()
    sds.endaccess()

    sds = h4f.select('SolarZenith')
    sza = sds.get()
    sds.endaccess()

    sds = h4f.select('SensorZenith')
    vza = sds.get()
    sds.endaccess()

    sds = h4f.select('SolarAzimuth')
    saa = sds.get()
    sds.endaccess()

    sds = h4f.select('SensorAzimuth')
    vaa = sds.get()
    sds.endaccess()
    h4f.end()

    return lat, lon, sza, vza, saa, vaa
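Hypothetical usage of MOD03_read, assuming a MODIS MOD03 geolocation granule is available locally (the filename below is made up):

lat, lon, sza, vza, saa, vaa = MOD03_read('MOD03.A2008001.0000.061.hdf')
print(lat.shape, sza.min(), sza.max())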
Example #6
def main(yaml_file):
    in_cfg = ReadInYaml(yaml_file)

    job_name = in_cfg.job_name
    ymd = in_cfg.ymd
    in_path = in_cfg.ipath
    out_path = in_cfg.opath
    for eachfile in in_path:
        if not os.path.isdir(out_path):
            os.makedirs(out_path)
        file_name = os.path.basename(eachfile)
        out_file = os.path.join(out_path, file_name)
        out_fig = os.path.join(out_path, file_name + '.bmp')
        cmd = """./bin/Global_Daily_SnowCover_0d05Deg_from_AVH09C1_V41_test_20190625 ./bin/Para/ %s %s %s""" % (
            eachfile, out_file, out_fig)

        os.system(cmd)
        print(cmd)
        h4r = SD(out_file)
        ndsi_data = h4r.select('Day_CMG_Snow_Cover')[:]
        h4r.end()
        #         print (ndsi_data)
        #
        #         file_name = os.path.basename(eachfile) + '.png'
        #         out_fig = os.path.join(out_path, file_name)
        #
        #
        out_fig = os.path.join(out_path, file_name + '.png')
        print(out_fig)
        plot_map(job_name, ymd, ndsi_data, out_fig)
Example #7
    def validate_file(self, satellite, year, fday, H, V):

        str_year = str(year)
        str_fday = str(fday).zfill(3)
        str_H = str(H).zfill(2)
        str_V = str(V).zfill(2)

        # file exists?
        try:
            mcd_file = mcd_files_dict[(satellite, str_year, str_fday, str_H,
                                       str_V)]
        except KeyError:
            self.lcm_meta.append([year, H, V, 'missing'])
            return None

        # file can be opened?
        try:
            _ = SD(os.path.join(mcd_data, mcd_file), SDC.READ)
            _.end()
        except HDF4Error:
            self.lcm_meta.append([year, H, V, 'garbled'])
            return None

        self.lcm_meta.append([year, H, V, 'all_good'])

        return mcd_file
Example #8
def ancillary_interp_ozone(file, lon, lat, dataset='ozone', kind='linear'):
    from pyhdf.SD import SD, SDC

    f = SD(file, SDC.READ)
    datasets_dic = f.datasets()
    meta = f.attributes()
    sds_obj = f.select(dataset)
    data = sds_obj.get()
    f.end()
    f = None

    from numpy import linspace
    from scipy import interpolate

    ## make lons and lats for this file
    lons = linspace(meta["Westernmost Longitude"],
                    meta["Easternmost Longitude"],
                    num=meta['Number of Columns'])
    lats = linspace(meta["Northernmost Latitude"],
                    meta["Southernmost Latitude"],
                    num=meta['Number of Rows'])

    ## do interpolation in space
    if kind == 'nearest':
        xi, xret = min(enumerate(lons), key=lambda x: abs(x[1] - float(lon)))
        yi, yret = min(enumerate(lats), key=lambda x: abs(x[1] - float(lat)))
        uoz = data[yi, xi] / 1000.
    else:
        interp = interpolate.interp2d(lons, lats, data, kind=kind)
        uoz = (interp(lon, lat))[0] / 1000.

    anc_ozone = {'ozone': {'interp': uoz}}
    return (anc_ozone)
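A usage sketch for ancillary_interp_ozone, assuming a hypothetical ancillary ozone file whose global attributes carry the extent and grid-size fields read above:

# hypothetical file and coordinates
anc = ancillary_interp_ozone('S2019001_O3_AUX.hdf', lon=4.9, lat=52.4, kind='nearest')
print(anc['ozone']['interp'])  # total ozone in atm-cm (Dobson units / 1000)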
Example #9
def read_MODIS_file(fname):
    os.chdir(DataDir)
    hdf = SD(fname, SDC.READ)
    metadata = hdf.attributes()
    firepix = metadata['FirePix']
    if firepix == 0:
        print('no fire pixel in %s' % (fname))
        hdf.end()
        return []
    print('%s fire pixel in %s' % (firepix, fname))
    data = np.zeros((firepix, 5))
    sds = hdf.select('FP_latitude')
    latdata = sds.get()
    sds = hdf.select('FP_longitude')
    londata = sds.get()
    sds = hdf.select('FP_power')
    FRPdata = sds.get()
    sds = hdf.select('FP_confidence')
    FRPcondata = sds.get()
    sds = hdf.select('FP_ViewZenAng')
    vzadata = sds.get()
    data[:, 0] = latdata
    data[:, 1] = londata
    data[:, 2] = FRPdata
    data[:, 3] = vzadata
    data[:, 4] = FRPcondata

    hdf.end()
    return data
Example #10
    def hdfopen(self, hdfPath, var='NDVI'):
        archivos = self.listFile(hdfPath, sufijo="hdf")
        print(archivos)
        expData = []
        for arc in archivos:
            print(arc)
            # Read HDF file
            hdf = SD(arc, SDC.READ)
            # Print variables
            print(hdf.datasets())
            # Read dataset
            dataVar = hdf.select(var)
            print("metadata from var")
            print(dataVar.dimensions())
            print(dataVar.attributes())
            dataMat = dataVar[:, :]
            print(dataMat)
            print(type(dataMat))
            expData.append(dataMat)
            print("Datavar #####")
            ##geolocation Var names
            # Read geolocation dataset.

            print("geolocation vars ················")
            lat = hdf.select('Latitude')
            latitude = lat[:, :]
            print(latitude)
            lon = hdf.select('Longitude')
            longitude = lon[:, :]
            print(longitude)
            hdf.end()
        self.plotMap(dataMat, longitude, latitude)
        return dataMat
Example #11
def read_rrc(inpath):
    '''Read rrc data m*n from hdf file.

    b1-5, b13-16 for MODIS Rrc;
    Rrc_443-862, Rrc_1238, ozone, senz, solz for VIIRS Rrc.
    '''
    hdf = SD(inpath, SDC.READ)
    #dts = sorted(hdf.datasets().keys())
    modis_key = ['CorrRefl_01','CorrRefl_02','CorrRefl_03','CorrRefl_04','CorrRefl_05',
                 'CorrRefl_13','CorrRefl_14','CorrRefl_15','CorrRefl_16']
    viirs_key = ['Rrc_443','Rrc_486','Rrc_551','Rrc_671','Rrc_745','Rrc_862','Rrc_1238']
    mission = os.path.basename(inpath)[0]
    if mission == 'A' or mission == 'T':
        keys = modis_key
    elif mission == 'V':
        keys = viirs_key
    else:
        keys = hdf.datasets().keys()
    for i,dt in enumerate(keys):
        print(i,dt)
        band = hdf.select(dt)[:,:]        
        if i == 0:
            limit = (band.shape[0], band.shape[1], len(keys))
            rrc = np.zeros(limit, dtype=np.float64)
            rrc[:, :, i] = band
        else:
            rrc[:,:,i] = band
    hdf.end()
    print(rrc.shape)
    return rrc
Example #12
def get_band_numbers(filename_MOD02, variable_names):
    """
    TODO
    """
    band_numbers = []
    f = SD(filename_MOD02)  #.select('MODIS_SWATH_Type_L1B')
    #dsets = f.datasets()

    #print(f.select(variable_names[0]).attributes.keys)

    valid_maxs = []
    for variable_name in variable_names:
        if '1KM_Emissive' in variable_name:
            b_nums = f.select('Band_1KM_Emissive')[:]
        elif '250' in variable_name:
            b_nums = f.select('Band_250M')[:]
        elif '500' in variable_name:
            b_nums = f.select('Band_500M')[:]
        elif '1KM_RefSB' in variable_name:
            b_nums = f.select('Band_1KM_RefSB')[:]
        radiance_scale, offsets = get_scale_and_offset(f.select(variable_name),
                                                       True)
        max_val = (32767 - np.array(offsets)) * np.array(radiance_scale)
        valid_maxs.append(max_val)
        band_numbers.append(b_nums)
    f.end()
    band_numbers = np.concatenate(band_numbers)
    valid_maxs = np.concatenate(valid_maxs, axis=0)
    valid_mins = np.zeros(valid_maxs.shape)
    valid_range = np.stack([valid_mins, valid_maxs], axis=-1)
    return band_numbers, valid_range
Example #13
def read_var_islice(filename,var_name,i,thetac,phic):
    nk = phic.size
    nj = thetac.size
    phic = phic[:,None]
    thetac = thetac[None,:]

    hdffile = SD(filename,SDC.READ)
    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi']:
        var=hdffile.select(var_name+'_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
    else:
        if var_name in ['br','btheta','bphi']:
            bx=hdffile.select('bx_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            by=hdffile.select('by_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            bz=hdffile.select('bz_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()

            if var_name=='br':
                var     = bx*cos(phic)*sin(thetac) + by*sin(phic)*sin(thetac) + bz*cos(thetac)
            elif var_name=='btheta':
                var = bx*cos(phic)*cos(thetac) + by*sin(phic)*cos(thetac) - bz*sin(thetac)
            else:
                var   =-bx*sin(phic)            + by*cos(phic)
        else:
            vx=hdffile.select('vx_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            vy=hdffile.select('vy_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()
            vz=hdffile.select('vz_').get(start=(0,0,i),count=(nk,nj,1)).squeeze()

            if var_name=='vr':
                var    = vx*cos(phic)*sin(thetac) + vy*sin(phic)*sin(thetac) + vz*cos(thetac)
            elif var_name=='vtheta':
                var = vx*cos(phic)*cos(thetac) + vy*sin(phic)*cos(thetac) - vz*sin(thetac)
            else:
                var   =-vx*sin(phic)            + vy*cos(phic)
    hdffile.end()
    return(var)
Example #14
def main(cal_file, with_cp):

    from pyhdf.SD import SD

    if with_cp:
        cmd = 'cp %s /home/noel/scratch/' % (cal_file)
        print "running "+cmd
        os.system(cmd)
        filename = os.path.basename(cal_file)
        cal_file = '/home/noel/scratch/' + filename
                        
    print('Reading ' + cal_file)
    
    vars = ['Latitude', 'Longitude', 
            'Total_Attenuated_Backscatter_532', 'Attenuated_Backscatter_1064', 'Perpendicular_Attenuated_Backscatter_532',
            'Pressure', 'Temperature', 'Molecular_Number_Density', 'Tropopause_Height', 'Surface_Elevation']
    
    hdf = SD(cal_file)
    for var in vars:
        print('Reading ' + var)
        hdf_var = hdf.select(var)
        data = hdf_var.get()
        hdf_var.endaccess()
    hdf.end()
    
    print('ok.')
    if with_cp:
        print('Removing ' + filename)
        cmd = 'rm -f /home/noel/scratch/' + filename
        os.system(cmd)
Example #15
    def get_timestamp(self):
        """
        return from 1970-01-01 00:00:00 seconds
        """
        if self.resolution == 13500:
            satellite_type1 = ['AQUA', 'TERRA']
            if self.satellite in satellite_type1:
                h4r = SD(self.in_file, SDC.READ)
                ary_time = h4r.select('Time').get()
                h4r.end()
                ary_time = ary_time  # / 1000000.
                # NPP CrIS time values are microseconds since 1958-01-01 UTC;
                # ymdhms / 1000000 = seconds (since 1958-01-01 UTC)
                secs = (datetime(1993, 1, 1, 0, 0, 0) -
                        datetime(1970, 1, 1, 0, 0, 0)).total_seconds()
                # return seconds since 1970-01-01
                data = ary_time + secs
                data = data.astype(np.int32)

            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))
        else:
            raise ValueError(
                "Can't read this data, please check its resolution: {}".format(
                    self.in_file))
        return data
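The constant added to ary_time is simply the span between the file's 1993-01-01 epoch and the Unix epoch; it can be reproduced directly:

from datetime import datetime

secs = (datetime(1993, 1, 1) - datetime(1970, 1, 1)).total_seconds()
print(secs)  # 725846400.0 -> 23 years, six of them leap years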
Example #16
def w(ofile, data, sds_base_name, nbands, xrange, yrange, units, rowdimname,
      coldimname):
    "hdf_utils.w(ofile, 'sds_base_name', nbands, xdim, ydim, 'units', 'rowdimname', 'coldimname')"
    "Function to create hdf file ofile and write data to it"
    # Dr. M. Disney, Sep 2011

    # create and write hdf file via SD
    dataopf = SD(ofile, SDC.WRITE | SDC.CREATE | SDC.TRUNC)
    for i in np.arange(0, nbands):
        sds = dataopf.create(sds_base_name + str(i), SDC.FLOAT32,
                             (xrange, yrange))
        sds.name = '' + str(i)
        sds.units = units
        sds.setfillvalue(0)
        dim1 = sds.dim(0)
        dim1.setname(rowdimname)
        dim2 = sds.dim(1)
        dim2.setname(coldimname)
        if nbands > 1:
            sds[:] = np.float32(data[i])
        else:
            sds[:] = np.float32(data)
        sds.endaccess()

    dataopf.end()
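A usage sketch for w(), writing two hypothetical float32 bands of shape (4, 5) to a demo file:

import numpy as np

bands = [np.random.rand(4, 5).astype(np.float32) for _ in range(2)]
w('demo_bands.hdf', bands, 'band_', 2, 4, 5, 'W/m^2', 'row', 'col')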
Example #17
def create_hdfeos_test_file(filename: str,
                            variable_infos: dict,
                            geo_resolution: Optional[int] = None,
                            file_shortname: Optional[str] = None,
                            include_metadata: bool = True):
    """Create a fake MODIS L1b HDF4 file with headers.

    Args:
        filename: Full path of filename to be created.
        variable_infos: Dictionary mapping HDF4 variable names to dictionary
            of variable information (see ``_add_variable_to_file``).
        geo_resolution: Resolution of geolocation datasets to be stored in the
            metadata strings stored in the global metadata attributes. Only
            used if ``include_metadata`` is ``True`` (default).
        file_shortname: Short name of the file to be stored in global metadata
            attributes. Only used if ``include_metadata`` is ``True``
            (default).
        include_metadata: Include global metadata attributes (default: True).

    """
    h = SD(filename, SDC.WRITE | SDC.CREATE)

    if include_metadata:
        if geo_resolution is None or file_shortname is None:
            raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.")
        setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname))  # noqa
        setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution))  # noqa
        setattr(h, 'ArchiveMetadata.0', _create_header_metadata())  # noqa

    for var_name, var_info in variable_infos.items():
        _add_variable_to_file(h, var_name, var_info)

    h.end()
Example #18
    def get_height(self):
        """
        return height
        """
        if self.resolution == 1000:
            satellite_type = ['AQUA', 'TERRA']
            if self.satellite in satellite_type:
                vmin = 27000
                vmax = 65535
                geo_file = self.__get_geo_file()
                h4r = SD(geo_file, SDC.READ)
                data_pre = h4r.select('Range').get()
                scale = h4r.select('Range').attributes()['scale_factor']
                h4r.end()
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))

            # filter out invalid values
            invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
            data_pre = data_pre.astype(np.float32)
            data_pre[invalid_index] = np.nan
            data = data_pre * scale

        return data
Example #19
def read_var_ikslice(filename,var_name,i,k,tc,pc):
    hdffile = SD(filename,SDC.READ)
    ni = hdffile.select('X_grid').ni-1
    nj = hdffile.select('X_grid').nj-1

    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi','bt','bp','vt','vp']:
        var=hdffile.select(var_name+'_').get(start=(k,0,i),count=(1,nj,1)).squeeze()
    else:
        if var_name in ['br','btheta','bphi','bt','bp']:
            bx=hdffile.select('bx_').get(start=(k,0,i),count=(1,nj,1)).squeeze()
            by=hdffile.select('by_').get(start=(k,0,i),count=(1,nj,1)).squeeze()
            bz=hdffile.select('bz_').get(start=(k,0,i),count=(1,nj,1)).squeeze()

            if var_name=='br':
                var     = bx*cos(pc[k])*sin(tc) + by*sin(pc[k])*sin(tc) + bz*cos(tc)
            elif (var_name=='btheta' or var_name=='bt'):
                var = bx*cos(pc[k])*cos(tc) + by*sin(pc[k])*cos(tc) - bz*sin(tc)
            else:
                var   =-bx*sin(pc[k])            + by*cos(pc[k])
        else:
            vx=hdffile.select('vx_').get(start=(k,0,i),count=(1,nj,1)).squeeze()
            vy=hdffile.select('vy_').get(start=(k,0,i),count=(1,nj,1)).squeeze()
            vz=hdffile.select('vz_').get(start=(k,0,i),count=(1,nj,1)).squeeze()

            if var_name=='vr':
                var    = vx*cos(pc[k])*sin(tc) + vy*sin(pc[k])*sin(tc) + vz*cos(tc)
            elif (var_name=='vtheta' or var_name=='vt'):
                var = vx*cos(pc[k])*cos(tc) + vy*sin(pc[k])*cos(tc) - vz*sin(tc)
            else:
                var   =-vx*sin(pc[k])            + vy*cos(pc[k])
    hdffile.end()
    return(var)
Example #20
    def Loadgeo(self, geoFile):

        try:
            h4File_R = SD(geoFile, SDC.READ)
            Lons = h4File_R.select('pixel_longitude').get()
            Lats = h4File_R.select('pixel_latitude').get()
            sata = h4File_R.select('pixel_satellite_azimuth_angle').get()
            satz = h4File_R.select('pixel_satellite_zenith_angle').get()

            idx = np.where(np.isclose(Lons, -999.))
            Lons[idx] = np.nan
            self.Lons = Lons

            idx = np.where(np.isclose(Lats, -999.))
            Lats[idx] = np.nan
            self.Lats = Lats

            idx = np.where(np.isclose(sata, -999.))
            sata[idx] = np.nan
            self.satAzimuth = sata

            idx = np.where(np.isclose(satz, -999.))
            satz[idx] = np.nan
            self.satZenith = satz

        except Exception as e:
            print(str(e))
            return
        finally:
            h4File_R.end()
Example #21
    def prepare(self):
        """
        Public:
            Handles reprojection of file, conversion of hdf into GeoTIFF
        """
        data_file = SD(self.input_file, SDC.READ)
        extracted_data = list()
        for band in self.bands:
            band_data = data_file.select(band)
            extracted_data.append(band_data.get())
        self.attributes = data_file.attributes()
        data_file.end()
        extracted_data = np.array(extracted_data)
        extracted_data[np.where(
            extracted_data <= self.low_thres)] = self.low_value
        extracted_data = np.log(extracted_data)
        extracted_data[np.where(extracted_data >= self.high_thres)] = HIGH_VAL
        indices = np.where((extracted_data > self.low_thres)
                           & (extracted_data < self.high_thres))
        extracted_data[indices] = (HIGH_VAL *
                                   (extracted_data[indices] - self.low_thres) /
                                   self.diff)
        extracted_data = extracted_data.astype(rasterio.uint8)
        file_name = self.input_file.split("/")[-1]
        self.prepare_thumbnail(extracted_data, file_name)
Example #22
    def get_rad1(self):
        """
        return rad
        """
        #         dsl = sun_earth_dis_correction(self.ymd)
        data = dict()
        if self.resolution == 1000:  # resolution is 1000 m
            satellite_type = ['AQUA', 'TERRA']
            data_file = self.in_file
            if self.satellite in satellite_type:
                dn = self.get_dn()
                h4r = SD(data_file, SDC.READ)

                ary_ch20_36_a = h4r.select(
                    'EV_1KM_Emissive').attributes()['radiance_scales']
                ary_ch20_36_b = h4r.select(
                    'EV_1KM_Emissive').attributes()['radiance_offsets']

                h4r.end()

                #                 center_wn = self.get_central_wave_number()
                # process each channel in turn
                for i in range(self.channels):
                    band = 'CH_{:02d}'.format(i + 1)

                    if i >= 20:
                        k = i - 20
                        if i <= 25:
                            band = 'CH_{:02d}'.format(i)
                        data_pre = (dn[band] -
                                    ary_ch20_36_b[k]) * ary_ch20_36_a[k]

                        data[band] = data_pre

        return data
Example #23
def readMOD35L2(fname, geoloc_only=False):
    hdf_file = SD(HDFDIR + fname)
    if not geoloc_only:
        cloud_mask = hdf_file.select('Cloud_Mask').get()
    lon = hdf_file.select('Longitude').get()
    lat = hdf_file.select('Latitude').get()
    hdf_file.end()
    
    if not geoloc_only:
        cld_msk = uint8(cloud_mask[0])
        cloud = cld_msk & 6 # 0, 2, 4, 6
        land = cld_msk & 192 # 0, 64, 128, 192
    
        cloud[cloud==0] = 1 # 0 -> Cloud
        cloud[cloud!=1] = 0 # 2, 4, 6 -> No cloud

        coast = land
        coast[coast==64] = 1 # 64 -> Coast
        coast[coast!=1] = 0 # 0, 128, 192 -> Not coast

        land[land!=0] = 1 # 64, 128, 192 -> Land, 0 -> Water
        
        return lon, lat, cloud, land, coast

    return lon, lat
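The masks above rely on the MOD35 packing of byte 0, where bits 1-2 hold the cloud determination and bits 6-7 the land/water flag; decoding one hypothetical packed byte by hand shows the arithmetic:

import numpy as np

byte0 = np.uint8(0b01000110)  # hypothetical packed cloud-mask byte
print(byte0 & 6)              # 6  -> bits 1-2 both set: confident clear
print(byte0 & 192)            # 64 -> bits 6-7: coast in the scheme above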
Example #24
    def read(self, filename, **kwargs):
        """Read the data"""
        from pyhdf.SD import SD
        import datetime

        # print "*** >>> Read the hdf-eos file!"
        root = SD(filename)

        # Get all the Attributes:
        # Common Attributes, Data Time,
        # Data Structure and Scene Coordinates
        for key in root.attributes().keys():
            self._eoshdf_info[key] = root.attributes()[key]

        # Start Time - datetime object
        starttime = datetime.datetime.strptime(self._eoshdf_info['Start Time'][0:13],
                                               "%Y%j%H%M%S")
        msec = float(self._eoshdf_info['Start Time'][13:16]) / 1000.
        self.starttime = starttime + datetime.timedelta(seconds=msec)

        # End Time - datetime object
        endtime = datetime.datetime.strptime(self._eoshdf_info['End Time'][0:13],
                                             "%Y%j%H%M%S")
        msec = float(self._eoshdf_info['End Time'][13:16]) / 1000.
        self.endtime = endtime + datetime.timedelta(seconds=msec)

        # What is the leading 'H' doing here?
        sensor_name = self._eoshdf_info['Sensor Name'][1:-1].lower()
        try:
            self.satid = EOS_SATELLITE[sensor_name]
        except KeyError:
            LOG.error("Failed setting the satellite id - sat-name = ",
                      sensor_name)

        self.orbit = self._eoshdf_info['Orbit Number']
        self.shape = (self._eoshdf_info['Number of Scan Control Points'],
                      self._eoshdf_info['Number of Pixel Control Points'])

        # try:
        if 1:
            value = root.select(self.name)
            attr = value.attributes()
            data = value.get()

            self.attr = attr
            band = data
            if self.name in FLAGS_QUALITY:
                self.data = band
            else:
                nodata = attr['bad_value_scaled']
                self.data = (np.ma.masked_equal(band, nodata) *
                             attr['slope'] + attr['intercept'])

            value.endaccess()
        # except:
        #    pass

        root.end()
        self.filled = True
Example #26
def _create_fake_dem_file(dem_fn, var_name, fill_value):
    from pyhdf.SD import SD, SDC
    h = SD(dem_fn, SDC.WRITE | SDC.CREATE)
    dem_var = h.create(var_name, SDC.INT16, (10, 10))
    dem_var[:] = np.zeros((10, 10), dtype=np.int16)
    if fill_value is not None:
        dem_var.setfillvalue(fill_value)
    h.end()
Example #27
def get_L2(L2_file):
    L2_file_obj = SD(L2_file)
    cmask_obj = L2_file_obj.select('baseline_cmask_goes_nop_cloud_mask')
    cmask = cmask_obj.get()
    cmask_obj.endaccess()
    L2_file_obj.end()

    return cmask
Example #28
    def load_geo(self):
        if self.geo_latlon is None:
            geo = SD(self.location_sourcedir + self.location, SDC.READ)
            self.geo_lat = geo.select('Latitude').get()
            self.geo_lon = geo.select('Longitude').get()
            self.geo_latlon = (self.geo_lat, self.geo_lon)
            geo.end()
        return self
Example #29
def readhdf(filename, fieldname, ignoreE=True):
    try:
        hdf = SD(filename, SDC.READ)
        data = hdf.select(fieldname)[:].copy()
        hdf.end()
    except Exception as e:
        if not ignoreE:
            print(e)
        data = Dataset(filename)[fieldname][:].copy()
    return data
Example #30
def get_lst_and_snow_days(hdf_file):
    data_hdf = SD(hdf_file)
    sds = data_hdf.select("design_matrix")
    lst_days = [int(x) for x in sds.lst_days.split(",")]
    snow_days = [int(x) for x in sds.snow_days.split(",")]
    sds.endaccess()
    data_hdf.end()
    return lst_days, snow_days
Example #31
    def extract_attributes(self):
        # extract the attributes from the data file
        try:
            granule = SD(self.data_path, 1)
        except TypeError:
            granule = SD(self.data_path.encode("utf-8"), 1)
        self.attributes = granule.attributes()
        granule.end()
Example #32
def read(filename):
    """
    Returns R,theta,phi,Rc,thetac,phic,br,btheta,bphi,vr,vtheta,vphi,rho,cs
    """

    hdffile = SD(filename, SDC.READ)

    x = hdffile.select('X_grid').get()
    y = hdffile.select('Y_grid').get()
    z = hdffile.select('Z_grid').get()
    bx = hdffile.select('bx_').get()[:-1, :-1, :-1]
    by = hdffile.select('by_').get()[:-1, :-1, :-1]
    bz = hdffile.select('bz_').get()[:-1, :-1, :-1]
    vx = hdffile.select('vx_').get()[:-1, :-1, :-1]
    vy = hdffile.select('vy_').get()[:-1, :-1, :-1]
    vz = hdffile.select('vz_').get()[:-1, :-1, :-1]
    rho = hdffile.select('rho_').get()[:-1, :-1, :-1]
    cs = hdffile.select('c_').get()[:-1, :-1, :-1]

    t = hdffile.time
    hdffile.end()

    # =========== Cell centers ==============
    xc = 0.125 * (x[:-1, :-1, :-1] + x[1:, :-1, :-1] + x[:-1, 1:, :-1] +
                  x[:-1, :-1, 1:] + x[1:, 1:, :-1] + x[1:, :-1, 1:] +
                  x[:-1, 1:, 1:] + x[1:, 1:, 1:])
    yc = 0.125 * (y[:-1, :-1, :-1] + y[1:, :-1, :-1] + y[:-1, 1:, :-1] +
                  y[:-1, :-1, 1:] + y[1:, 1:, :-1] + y[1:, :-1, 1:] +
                  y[:-1, 1:, 1:] + y[1:, 1:, 1:])
    zc = 0.125 * (z[:-1, :-1, :-1] + z[1:, :-1, :-1] + z[:-1, 1:, :-1] +
                  z[:-1, :-1, 1:] + z[1:, 1:, :-1] + z[1:, :-1, 1:] +
                  z[:-1, 1:, 1:] + z[1:, 1:, 1:])
    # =======================================

    R = sqrt(x**2 + y**2 + z**2)
    theta = arccos(z / R)
    phi = arctan2(y, x)
    phi[phi < 0] += 2 * pi

    Rc = sqrt(xc**2 + yc**2 + zc**2)
    thetac = arccos(zc / Rc)
    phic = arctan2(yc, xc)
    phic[phic < 0] += 2 * pi

    br = bx * cos(phic) * sin(thetac) + by * sin(phic) * sin(
        thetac) + bz * cos(thetac)
    btheta = bx * cos(phic) * cos(thetac) + by * sin(phic) * cos(
        thetac) - bz * sin(thetac)
    bphi = -bx * sin(phic) + by * cos(phic)

    vr = vx * cos(phic) * sin(thetac) + vy * sin(phic) * sin(
        thetac) + vz * cos(thetac)
    vtheta = vx * cos(phic) * cos(thetac) + vy * sin(phic) * cos(
        thetac) - vz * sin(thetac)
    vphi = -vx * sin(phic) + vy * cos(phic)

    return (t, R, theta, phi, Rc, thetac, phic, br, btheta, bphi, vr, vtheta,
            vphi, rho, cs)
Example #33
    def _read(self, fgeom, fhgt):
        """Reads location/geometry and height files"""

        try:
            if self.verb:
                print("[] Working on " + fgeom)
            hfile = SD(fgeom)
        except HDF4Error:
            if self.verb > 2:
                print("- %s: not recognized as an HDF file" % fgeom)
            return
        for sds in self.SDS_geom:
            sds_ = sds.replace(' ', '_')
            #T          self.__dict__[sds_] = hfile.select(sds).get()
            #           self.__dict__[sds_] = hfile.select(sds).get()[:1856,:1856] #00 #T too big as is
            #           self.__dict__[sds_] = hfile.select(sds).get()[:1856,1856:] #01 #T too big as is
            #           self.__dict__[sds_] = hfile.select(sds).get()[1856:,:1856] #10 #T too big as is
            self.__dict__[sds_] = hfile.select(sds).get()[
                1856:, 1856:]  #11 #T too big as is
        hfile.end()

        try:
            if self.verb:
                print("[] Working on " + fhgt)
            hfile = SD(fhgt)
        except HDF4Error:
            if self.verb > 2:
                print("- %s: not recognized as an HDF file" % fhgt)
            return
        for sds in self.SDS_hgt:
            sds_ = sds.replace(' ', '_')
            #T          self.__dict__[sds_] = hfile.select(sds).get()
            #           self.__dict__[sds_] = hfile.select(sds).get()[:1856,:1856] #00 #T too big as is
            #           self.__dict__[sds_] = hfile.select(sds).get()[:1856,1856:] #01 #T too big as is
            #           self.__dict__[sds_] = hfile.select(sds).get()[1856:,:1856] #10 #T too big as is
            self.__dict__[sds_] = hfile.select(sds).get()[
                1856:, 1856:]  #11 #T too big as is
        hfile.end()

        # convert "ocean height" to zero
        self.P001_MSG_GLOBE_DEM[self.P001_MSG_GLOBE_DEM == OCEAN] = 0

        # store mask of off-view points
        self.offview = np.logical_or(self.Longitude == OFFVIEW,
                                     self.Latitude == OFFVIEW)
        self.offview = np.logical_or(self.offview,
                                     self.P001_MSG_GLOBE_DEM == OFFVIEW)

        # store unflattened shape
        self.orgshape = self.offview.shape

        # store flattened, in-view SDS
        for sds in self.SDS_geom + self.SDS_hgt:
            sds_ = sds.replace(' ', '_')
            self.__dict__[sds_] = self.__dict__[sds_][~self.offview]

        # number of obs
        self.nobs = self.Longitude.size
Example #34
def Init1KMMODISData(inHdfFile):
    print('hdf init')

    latitude = CF.SDSObject()
    longitude = CF.SDSObject()
    refArr = CF.SDSObject()
    sunZ = CF.SDSObject()
    senZ = CF.SDSObject()
    sunA = CF.SDSObject()
    senA = CF.SDSObject()
    #hdfDSArr = [CF.SDSObject()] * 7

    hdf = SD(inHdfFile, SDC.READ)

    lat = hdf.select('Latitude')
    latitude.data = np.array(lat[:, :])
    latitude.Nl = latitude.data.shape[0]
    latitude.Np = latitude.data.shape[1]

    lon = hdf.select('Longitude')
    longitude.data = np.array(lon[:, :])
    longitude.Nl = longitude.data.shape[0]
    longitude.Np = longitude.data.shape[1]

    sunz = hdf.select('SolarZenith')
    sunZ.data = np.array(sunz[:, :]) * 0.01
    sunZ.Nl = sunZ.data.shape[0]
    sunZ.Np = sunZ.data.shape[1]

    senz = hdf.select('SensorZenith')
    senZ.data = np.array(senz[:, :]) * 0.01
    senZ.Nl = senZ.data.shape[0]
    senZ.Np = senZ.data.shape[1]

    suna = hdf.select('SolarAzimuth')
    sunA.data = np.array(suna[:, :]) * 0.01
    sunA.Nl = sunA.data.shape[0]
    sunA.Np = sunA.data.shape[1]

    sena = hdf.select('SensorAzimuth')
    senA.data = np.array(sena[:, :]) * 0.01
    senA.Nl = senA.data.shape[0]
    senA.Np = senA.data.shape[1]

    # EV_1KM_RefSB  EV_250_Aggr1km_RefSB
    refArr.data = np.zeros((CF.Nbands, 2030, 1354))
    ref = hdf.select('EV_250_Aggr1km_RefSB')
    #refArr.data = ((array(ref[:,:,:]) - CF.refOffset) * CF.refScale ) / np.cos( sunZ.data * CF.DEG2RAD)
    refArr.data[:2, :, :] = np.array(ref[:, :, :])
    refArr.Nl = refArr.data.shape[1]
    refArr.Np = refArr.data.shape[2]

    # EV_500_Aggr1km_RefSB
    ref = hdf.select('EV_500_Aggr1km_RefSB')
    refArr.data[2:7, :, :] = np.array(ref[:, :, :])

    hdf.end()
    return latitude, longitude, refArr, sunZ, senZ, sunA, senA
Example #35
def load_sd(fname, varnames):
    '''Return a list containing the SD (Scientific Dataset) arrays specified by
    list varnames, contained in the hdf4 file with filename fname.'''
    dataf = SD(fname)
    data_list = []
    for name in varnames:
        data_list.append(dataf.select(name)[:])
    dataf.end()
    return data_list
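Hypothetical usage of load_sd, pulling two SDS arrays from one file in a single call (the filename is made up):

lat, lon = load_sd('MOD03.A2008001.0000.061.hdf', ['Latitude', 'Longitude'])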
Example #36
def get_predictors_and_response(hdf_file):
    """
    :param hdf_file:
    :return: tuple of predictors and the response
    """
    data_hdf = SD(hdf_file)
    design_matrix = data_hdf.select("design_matrix").get()
    data_hdf.end()
    return design_matrix[:, :-1], design_matrix[:, -1]
Example #37
def main(inputhdfile):
    hdf = SD(inputhdfile, 1)
    sensing_time = hdf.SENSING_TIME
    hdf.end()
    datedttimepattern = "%Y-%m-%dT%H:%M:%S"
    scene_time = datetime.datetime.strptime(
        sensing_time.split(".")[0], datedttimepattern)
    hms = "%s%s%s" % (str(scene_time.hour).zfill(2), str(
        scene_time.minute).zfill(2), str(scene_time.second).zfill(2))
    sys.stdout.write(hms)
Example #38
def get_core(filename):
    """
    given the path to a Modis hdf4 file with a "CoreMetadata.0" attribute
    return that value as a string
    """
    filename = str(filename)
    the_file = SD(filename, SDC.READ)
    metaDat = the_file.attributes()["CoreMetadata.0"]
    core_meta = str(metaDat).rstrip(" \t\r\n\0")
    the_file.end()
    return core_meta
Example #39
def get_dims(file_):

    fh = SD(file_, SDC.READ)

    dims = OrderedDict()
    dim_names = ['log Z', 'Log Mu', 'Tau_V', 'Xsi', 'Log U']
    for dim_name in dim_names:
        dims[dim_name] = fh.select(dim_name)[:]

    fh.end()
    return dims
Example #40
def read_var(fname,varname,normalized=False):
    f     = SD(fname,SDC.READ)
    phi   = f.select('fakeDim0')[:]
    theta = f.select('fakeDim1')[:]
    r     = f.select('fakeDim2')[:]
    var   = f.select('Data-Set-2')[:]
    f.end()

    if normalized:
        return(phi,theta,r,var)
    else:
        return(phi,theta,r*mas_units['length'],var*mas_units[varname])
Example #41
def t12(mfile):
    hdffile = SD(mfile)
    x = hdffile.select('MYD021KM_EV_1KM_Emissive_Band32')
    scale_factor = x.attributes()['scale_factor']
    add_offset = x.attributes()['add_offset']
    x = x[:]
    hdffile.end()    
    
    idx = x > 65534
    x = (x - add_offset) * scale_factor
    t = planck (12.02, x)
    t[idx] = np.nan
    return t
Example #42
class GeoProf(object):
    
    def __init__(self, filename):
        
        self.hdf = SD(filename)
        self.filename = filename
        self.z = filename[-2:]
        self.orbit = filename[-15:-4]
        self.id = filename[-25:-4]
        
    def close(self):
        self.hdf.end()
        self.hdf = None
        
    def _read_var(self, varname):
        hdfvar = self.hdf.select(varname)
        data = hdfvar[:]
        hdfvar.endaccess()
        return data
    
    def coords(self):
        lat = self._read_var('Latitude')
        lon = self._read_var('Longitude')
        return lon, lat
    
    def time(self):
        time = self._read_var('Time')
        return time
    
    
    def altitude(self):
        ''' altitude in kilometers '''
        
        alt = self._read_var('Height') / 1e3
        return alt
    
    def cloudmask(self):
        '''
        "0 = No cloud detected\n",
        "1 = likely bad data\n",
        "5 = likely ground clutter\n",
        "5-10 = week detection found using along track integration\n",
        "20 to 40 = Cloud detected .. increasing values represents clouds with lower chance of a being a false detection" ;
        _FillValue = '\200' ;
        
        shape [nprof, nalt]
        '''
        
        cm = self._read_var('CPR_Cloud_mask')
        
        return cm
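A usage sketch for GeoProf, assuming a hypothetical CloudSat 2B-GEOPROF granule on disk:

gp = GeoProf('2008183012329_11573_CS_2B-GEOPROF_GRANULE_P_R04_E02.hdf')  # hypothetical file
lon, lat = gp.coords()
cm = gp.cloudmask()
gp.close()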
Example #44
def callback(body, message):
    """Do actual work."""

    logger.info("body in callback() is %s" % body)

    # pull lat/lon, time
    path = body
    sd = SD(path)
    lat = N.array(sd.select('Latitude').get())
    lon = N.array(sd.select('Longitude').get())
    t = N.array(sd.select('Time').get())
    sd.end()
    #logger.info("lat: %s" % str(lat.shape))
    #logger.info("lon: %s" % str(lon.shape))
    #logger.info("time: %s" % str(t.shape))

    # build metadata json
    id = os.path.basename(path)
    md = {
        "id": id,
        "dataset": "AIRX2RET",
        "starttime": t[0,0],
        "endtime": t[44,29],
        "location": {
            "coordinates": [[
                [ lon[0,0], lat[0,0] ],
                [ lon[0,29], lat[0,29] ],
                [ lon[44,29], lat[44,29] ],
                [ lon[44,0], lat[44,0] ],
                [ lon[0,0], lat[0,0] ],
            ]], 
            "type": "polygon"
        }, 
        "urls": "http://mozart/data/public/products/%s" % id
    }

    # publish
    pub_dir = '/data/public/products'
    ensure_dir(pub_dir)
    shutil.move(path, os.path.join(pub_dir, id))

    # insert into ElasticSearch
    index = doctype = 'airs'
    conn = ES('http://localhost:9200')
    mapping = json.load(open('grq_mapping.json'))
    if not conn.indices.exists_index(index):
        conn.indices.create_index(index, mapping)
    conn.indices.put_mapping(doctype, mapping, index)
    ret = conn.index(md, index, doctype, md['id'])

    message.ack()
Example #45
class _Cal:
    """
    Trying to open a non-existing CALIOP file gives an exception
    """

    def __init__(self, filename):
        warnings.simplefilter('ignore', DeprecationWarning)

        self.hdf = SD(filename, SDC.READ)
        self.filename = filename
        # time of orbit start
        self.orbit = filename[-15:-4]
        self.z = self.orbit[-2:]  # zn or zd
        self.year = int(filename[-25:-21])
        self.month = int(filename[-20:-18])
        self.day = int(filename[-17:-15])
        self.hour = int(filename[-14:-12])
        self.minutes = int(filename[-11:-9])
        self.seconds = int(filename[-8:-6])
        # date tag + orbit start
        self.id = filename[-25:-4]
        self.date = datetime.datetime(self.year, self.month, self.day,
                                      self.hour, self.minutes, self.seconds)

    def __repr__(self):
        return self.filename

    def close(self):
        self.hdf.end()
        self.hdf = None

    # IO

    def _read_var(self, var, idx=None):
        """
        read a variable (1D or 2D) in HDF file
        """
    
        hdfvar = self.hdf.select(var)
        if idx is None:
            data = hdfvar[:]
        else:
            if len(hdfvar.dimensions()) == 1:
                data = hdfvar[idx[0]:idx[1]]
            else:
                data = hdfvar[idx[0]:idx[1], :]
        hdfvar.endaccess()
        return data
Example #46
    def parseMetadata(self, filepath):

        metadata = {}
    
        dir, filename = os.path.split(filepath)
        if re.match(FILENAME_PATTERN, filename):
            logging.info("Parsing HDF file=%s" % filepath)

            # open HDF file
            try:
                hdfFile = SD(filepath, SDC.READ)
            except HDF4Error as e:
                logging.info(e)
                raise e

            # variables
            variables = hdfFile.datasets().keys()

            # time fields
            year = hdfFile.select('Year')[:]
            month = hdfFile.select('Month')[:]
            day = hdfFile.select('Day')[:]
            hour = hdfFile.select('Hour')[:]
            minute = hdfFile.select('Minute')[:]
            second = hdfFile.select('Seconds')[:]

            # space fields
            lon = hdfFile.select('Longitude')[:]
            lat = hdfFile.select('Latitude')[:]

            datetimes = []
            lats = []
            lons = []
            for t in range(22):
                for x in range(15):
                    if year[t,x] != -9999:

                        datetimes.append( dt.datetime(year[t,x],month[t,x],day[t,x],hour[t,x],minute[t,x],second[t,x], tzinfo=tzutc()) )
                        lons.append( lon[t,x] )
                        lats.append( lat[t,x] )
                        
            # store metadata values
            storeMetadata(metadata, np.asarray(lons), np.asarray(lats), np.asarray(datetimes), variables)

            # close HDF file
            hdfFile.end()

        return metadata
Example #47
def write_hdf(filename, dataset, data):
    '''
    write a dataset in hdf file
    '''

    hdf = SD(filename, SDC.WRITE|SDC.CREATE)

    typ = {
            'int16'   : SDC.INT16,
            'float32' : SDC.FLOAT32,
            'float64' : SDC.FLOAT64,
            }[data.dtype.name]

    sds = hdf.create(dataset, typ, data.shape)
    sds[:] = data[:]
    sds.endaccess()

    hdf.end()
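A round-trip sketch for write_hdf: write a small float32 array, then read it back with SD to confirm the contents:

import numpy as np
from pyhdf.SD import SD, SDC

data = np.arange(12, dtype=np.float32).reshape(3, 4)
write_hdf('demo.hdf', 'demo_sds', data)

f = SD('demo.hdf', SDC.READ)
assert np.array_equal(f.select('demo_sds')[:], data)
f.end()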
Example #48
def write_interpolated(filename, f0, f1, fact, datasetNames):
    '''
    interpolate two hdf files f0 and f1 using factor fact, and
    write the result to filename
    '''

    hdf = SD(filename, SDC.WRITE|SDC.CREATE)
    for datasetName in datasetNames:

        try:
            info = SD(f0).select(datasetName).info()
        except Exception:
            print('Error loading %s in %s' % (datasetName, f0), file=stderr)
            raise

        typ  = info[3]
        shp  = info[2]
        sds_in1 = SD(f0).select(datasetName)
        met0 = sds_in1.get()
        met1 = SD(f1).select(datasetName).get()

        interp = (1-fact)*met0 + fact*met1

        interp = interp.astype({
                SDC.INT16: 'int16',
                SDC.FLOAT32: 'float32',
                SDC.FLOAT64: 'float64',
            }[typ])

        # write
        sds = hdf.create(datasetName, typ, shp)
        sds[:] = interp[:]

        # copy attributes
        attr = sds_in1.attributes()
        if len(attr) > 0:
            for name in attr.keys():
                setattr(sds, name, attr[name])
        sds.endaccess()

    hdf.end()
Example #49
    def setUp(self):
        """Create a test HDF4 file"""
        from pyhdf.SD import SD, SDC
        h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC)
        data = np.arange(10. * 100, dtype=np.float32).reshape((10, 100))
        v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100))
        v1[:] = data
        v2 = h.create('ds1_i', SDC.INT16, (10, 100))
        v2[:] = data.astype(np.int16)

        # Add attributes
        h.test_attr_str = 'test_string'
        h.test_attr_int = 0
        h.test_attr_float = 1.2
        # h.test_attr_str_arr = np.array(b"test_string2")
        for d in [v1, v2]:
            d.test_attr_str = 'test_string'
            d.test_attr_int = 0
            d.test_attr_float = 1.2

        h.end()
Example #50
def r_theta_phi_uniform(filename):
    """
    Return R, theta, phi 1-d arrays, assuming uniform grid
    """

    hdffile = SD(filename,SDC.READ)

    # note, minimal data are read from file

    ni=hdffile.select('X_grid').ni-1
    nj=hdffile.select('X_grid').nj-1
    nk=hdffile.select('X_grid').nk-1


    # first get R; in principle, could just take z along the axis
    # but are allowing for the possibility that the axis is cut out
    x=hdffile.select('X_grid').get(start=(0,0,0),count=(1,1,ni+1)).squeeze()
    y=hdffile.select('Y_grid').get(start=(0,0,0),count=(1,1,ni+1)).squeeze()
    z=hdffile.select('Z_grid').get(start=(0,0,0),count=(1,1,ni+1)).squeeze()
    R = sqrt(x**2+y**2+z**2)

    z=hdffile.select('Z_grid').get(start=(0,0,0),count=(1,nj+1,1)).squeeze()
    theta = arccos(z/R[0])

    x=hdffile.select('X_grid').get(start=(0,nj//2,0),count=(nk+1,1,1)).squeeze()
    y=hdffile.select('Y_grid').get(start=(0,nj//2,0),count=(nk+1,1,1)).squeeze()
    phi=arctan2(y,x)
    phi[phi<0]+=2*pi
    phi[-1]=phi[0]+2*pi
    
    hdffile.end()

    R/=6.96e10   # FIX ME HARD CODED UNITS
    Rc = 0.5*(R[1:]+R[:-1])
    thetac = 0.5*(theta[1:]+theta[:-1])
    phic = 0.5*(phi[1:]+phi[:-1])
    
    return R,theta,phi,Rc,thetac,phic
Example #51
def read_var_jslice(filename,var_name,j,thetac,phic):
    hdffile = SD(filename,SDC.READ)
    ni = hdffile.select('X_grid').ni-1
    nk = hdffile.select('X_grid').nk-1
    phic = phic[:,None]
    thetac = thetac[j]

    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi']:
        if var_name in ['X_grid','Y_grid','Z_grid']:
            var=hdffile.select(var_name).get(start=(0,j,0),count=(nk,1,ni)).squeeze()
        else:
            var=hdffile.select(var_name+'_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()
    else:
        if var_name in ['br','btheta','bphi']:
            bx=hdffile.select('bx_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()
            by=hdffile.select('by_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()
            bz=hdffile.select('bz_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()

            if var_name=='br':
                var     = bx*cos(phic)*sin(thetac) + by*sin(phic)*sin(thetac) + bz*cos(thetac)
            elif var_name=='btheta':
                var = bx*cos(phic)*cos(thetac) + by*sin(phic)*cos(thetac) - bz*sin(thetac)
            else:
                var   =-bx*sin(phic)            + by*cos(phic)
        else:
            vx=hdffile.select('vx_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()
            vy=hdffile.select('vy_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()
            vz=hdffile.select('vz_').get(start=(0,j,0),count=(nk,1,ni)).squeeze()

            if var_name=='vr':
                var    = vx*cos(phic)*sin(thetac) + vy*sin(phic)*sin(thetac) + vz*cos(thetac)
            elif var_name=='vtheta':
                var = vx*cos(phic)*cos(thetac) + vy*sin(phic)*cos(thetac) - vz*sin(thetac)
            else:
                var   =-vx*sin(phic)            + vy*cos(phic)
    hdffile.end()
    return(var)
Example #52
def read_var(filename,var_name):
    hdffile = SD(filename,SDC.READ)
    if var_name not in ['br','btheta','bphi','vr','vtheta','vphi']:
        var=hdffile.select(var_name+'_').get()[:-1,:-1,:-1]
    else:
        R,theta,phi=r_theta_phi_uniform(filename)
        thetac = 0.5*(theta[1:]+theta[:-1])
        phic = 0.5*(phi[1:]+phi[:-1])
        
        phic = phic[:,None,None]
        thetac = thetac[None,:,None]

        if var_name in ['br','btheta','bphi']:
            bx=hdffile.select('bx_').get()[:-1,:-1,:-1]
            by=hdffile.select('by_').get()[:-1,:-1,:-1]
            bz=hdffile.select('bz_').get()[:-1,:-1,:-1]

            if var_name=='br':
                var = bx*cos(phic)*sin(thetac) + by*sin(phic)*sin(thetac) + bz*cos(thetac)
            elif var_name=='btheta':
                var = bx*cos(phic)*cos(thetac) + by*sin(phic)*cos(thetac) - bz*sin(thetac)
            else:
                var = -bx*sin(phic) + by*cos(phic)
        else:
            vx=hdffile.select('vx_').get()[:-1,:-1,:-1]
            vy=hdffile.select('vy_').get()[:-1,:-1,:-1]
            vz=hdffile.select('vz_').get()[:-1,:-1,:-1]

            if var_name=='vr':
                var = vx*cos(phic)*sin(thetac) + vy*sin(phic)*sin(thetac) + vz*cos(thetac)
            elif var_name=='vtheta':
                var = vx*cos(phic)*cos(thetac) + vy*sin(phic)*cos(thetac) - vz*sin(thetac)
            else:
                var = -vx*sin(phic) + vy*cos(phic)
    hdffile.end()
    return var
def load_vision(filename, var=None, T=1):
    """load_vision loads a Vision log file and
       returns its content in a dict.
    """

    assert exists(filename), 'Invalid filename.'

    f = SD(filename, SDC.READ)

    # New time axis
    end = ceil(f.select('ts_group_0').get()[-1])
    new_time = np.arange(0, end, T)

    # Initialize dict
    req_data = {'t': new_time}

    # Loop over variable list and loaded signals to search for matches
    if not var:
        req_data.update({key.split('.')[-1]: _select_interp(new_time, f, key)
                         for key in f.datasets().keys()
                         if not key.startswith('ts_')})
    elif isinstance(var, basestring):  # Python 2: basestring covers str and unicode
        first_match = next((key for key in f.datasets().keys() if var in key),
                           None)
        req_data.update({var: _select_interp(new_time, f, first_match)})
    else:
        first_match = zip(var,
                          [next((key for key in f.datasets().keys()
                                 if sig in key), None)
                           for sig in var])
        req_data.update({sig: _select_interp(new_time, f, key)
                         for sig, key in first_match})

    f.end()

    return req_data
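The helper _select_interp is not defined in this snippet. A plausible sketch, assuming each signal has a companion time axis in the file (the code above reads 'ts_group_0') and that values are linearly interpolated onto the new grid; the pairing between signal and time-axis names is an illustrative guess:

# Hypothetical sketch of the _select_interp helper used above; the real
# implementation is not shown. Assumes a companion time dataset exists for
# each signal and that its values are monotonically increasing.
import numpy as np

def _select_interp(new_time, f, key, ts_key='ts_group_0'):
    old_time = f.select(ts_key).get()   # original sample times
    values = f.select(key).get()        # original sample values
    return np.interp(new_time, old_time, values)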
Example #54
class HDF_SDS(object):
    """
    This class is used in place of the pyhdf.SD.SDS class so that the file contents can be loaded at a later
    time rather than in this module's read method (allowing the SD instances to be closed and the file handles
    freed)
    """

    _sd = None
    _sds = None
    _filename = None
    _variable = None

    def __init__(self, filename, variable):
        self._filename = filename
        self._variable = variable

    def _open_sds(self):
        """
        Open the SDS file for reading
        """
        from pyhdf.SD import SD as SDS

        self._sd = SDS(self._filename)
        self._sds = self._sd.select(self._variable)

    def _close_sds(self):
        """
        Close the SDS file for reading

        NB: Exceptions thrown from here may hide an exception thrown in get(), info(), etc.
        """
        try:
            if self._sds is not None:
                self._sds.endaccess()
        finally:
            if self._sd is not None:
                self._sd.end()

    def get(self):
        """
        Call pyhdf.SD.SDS.get(), opening and closing the file
        """
        try:
            self._open_sds()
            data = self._sds.get()
            return data
        finally:
            self._close_sds()

    def attributes(self):
        """
        Call pyhdf.SD.SDS.attributes(), opening and closing the file
        """
        try:
            self._open_sds()
            attributes = self._sds.attributes()
            return attributes
        finally:
            self._close_sds()

    def info(self):
        """
        Call pyhdf.SD.SDS.info(), opening and closing the file
        """
        try:
            self._open_sds()
            info = self._sds.info()
            return info
        finally:
            self._close_sds()
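A short usage sketch (the file and variable names are hypothetical placeholders): every accessor opens the file, reads what it needs, and closes the file again, so no HDF handle stays open between calls.

# Usage sketch; the filename and variable are placeholders.
sds = HDF_SDS('some_granule.hdf', 'Cloud_Optical_Thickness')
info = sds.info()          # open, read SDS info, close
data = sds.get()           # open again, read the data, close
attrs = sds.attributes()   # same open/read/close pattern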
def main():  # Main code

# Set the overall timer

    all_start_time = time.time()

# Set the pixel sizes for 1.1 km data

    x_size = 512
    y_size = 128

# Set the minimum and maximum AOD for plotting

    aod_plot_min = 0.00
    aod_plot_max = 0.30
    aod_plot_ticks = 7 # Usually 1 more than you think
    aod_plot_step = 0.05

# Set the paths

    agppath = '/Volumes/ChoOyu/DATA/AGP/'
    aeropath = '/Volumes/ChoOyu/DATA/2013_01_20/MISR/'
    figpath = '/Users/mgaray/Desktop/CODING/PYTHON/PY27/MAY15/AEROSOL/FIGS/'

# Set the MISR product

    misr_name = '0022'  # 17.6 km Standard Product

# Set the MISR orbit

    misr_path = 'P042'
    misr_orbit = '69644'

# Set the block range

    start_block = 60 # Start block 1-based
    num_block = 4
    out_base = '_O'+misr_orbit+'_{}'.format(start_block)
    out_base = out_base+'_{}'.format(num_block)+'_'+misr_name+'_DRAGON_01.png'

# Set the AERONET lat, lon, and values
# NOTE: These are taken from the AERONET_colocate_to_MISR.csv file

    aero_lat = [34.137,35.238,35.332,36.819,36.102,36.706,36.785,36.316,
        36.206,36.953,36.597,36.032,35.504,36.634,36.314,36.785]
    
    aero_lon = [-118.126,-118.788,-119.000,-119.716,-119.566,-119.741,-119.773,
        -119.643,-120.105,-120.034,-119.504,-119.055,-119.272,-120.382,-119.393,
        -119.773]
        
    aero_aod = [0.009,0.212,0.231,0.111,0.226,0.130,0.120,0.208,0.159,0.093,
        0.170,0.149,0.201,0.165,0.215,0.114]

### Read the AGP Data for Navigation of Old-Style Data

# Start the timer

    start_time = time.time()

# Change directory to the AGP-path

    os.chdir(agppath)

# Search for the correct MISR path
    
    search_str = 'MISR*'+misr_path+'*.hdf'
    file_list = glob.glob(search_str)

# Set the filename

    inputName = file_list[0]

# Tell user location in process

    print("Reading: "+inputName)

# Open the file

    hdf = SD(inputName, SDC.READ)

# Read the data fields

    var01 = hdf.select('GeoLatitude')
    var02 = hdf.select('GeoLongitude')

    lat_raw = var01.get()
    lon_raw = var02.get()

# Close the file

    hdf.end()

# Print the time

    end_time = time.time()
    print("Time to Read AGP data was %g seconds" % (end_time - start_time))

# Extract the navigation information to get corners for the entire image

    lat = lat_raw[start_block-1,:,:]
    lon = lon_raw[start_block-1,:,:]
    lat_max = np.amax(lat)
    lon_max = np.amax(lon)

    lat = lat_raw[start_block-1+num_block-1,:,:]
    lon = lon_raw[start_block-1+num_block-1,:,:]
    lat_min = np.amin(lat)
    lon_min = np.amin(lon)
        
### Read the V22 Data (Old-Style)

# Start the timer

    start_time = time.time()

# Change directory to the basepath

    os.chdir(aeropath)

### Get the first MISR Aerosol File

    search_str = 'MISR_AM1_AS_AEROSOL*'+misr_orbit+'*'+misr_name+'.hdf'

    file_list = glob.glob(search_str)

# Set the filename

    inputName = file_list[0]

# Tell user location in process

    print("Reading: "+inputName)

# Open the file

    hdf = SD(inputName, SDC.READ)

# Read the data fields

    var01 = hdf.select('AlgTypeFlag')
    var02 = hdf.select('AerRetrSuccFlag')
    var03 = hdf.select('RegBestEstimateSpectralOptDepth')
    var04 = hdf.select('RegLowestResidSpectralOptDepth')

    alg_type_v22 = var01.get()
    succ_flag_v22 = var02.get()
    rbe_aod_v22 = var03.get()
    rlr_aod_v22 = var04.get()

# Close the file

    hdf.end()    

# Print the time

    end_time = time.time()
    print("Time to Read Aerosol data was %g seconds" % (end_time - start_time))

# Process the success flag as a mask (set success to 1, otherwise 0)

    sf_v22 = np.copy(succ_flag_v22)
    sf_v22[succ_flag_v22 != 7] = 0
    sf_v22[succ_flag_v22 == 7] = 1
    
## MAP THE BEST ESTIMATE DATA

# Set the plot area

    fig = plt.figure(figsize=(12,6), dpi=120)

# Set the title

    plt.title('V22 Best Estimate AOD')

# Draw basemap

    m = Basemap(llcrnrlon=lon_min,llcrnrlat=lat_min,urcrnrlon=lon_max,urcrnrlat=lat_max,
        projection='cyl',resolution='i')
    m.drawmapboundary(fill_color='0.3')

## Loop over blocks

    for i in range(num_block):

        block = start_block + i - 1 # Block 0-based

# Extract the data

        lat = lat_raw[block,:,:]
        lon = lon_raw[block,:,:]
        aod_01 = rbe_aod_v22[block,:,:,1]*1. # Green band
        succ_01 = sf_v22[block,:,:]*1.
        keep = (succ_01 > 0.0)
        print('V22')
        print('Mean AOD = ',np.mean(aod_01[keep]))
        print(' Max AOD = ',np.amax(aod_01[keep]))

# Mask locations without valid retrievals

        mask = (succ_01 < 1.0)
        aod_01[mask] = 0.0
        
# Resize the navigation from 1.1 km to 4.4 km (to match new product)

        lat_small = np.squeeze(lat.reshape([32,4,128,4]).mean(3).mean(1))
        lon_small = np.squeeze(lon.reshape([32,4,128,4]).mean(3).mean(1))

# Resize the data to match the navigation
# Note: The factor is 4 in both dimensions (17.6 -> 4.4 km)

        aod_full = np.repeat(np.repeat(aod_01,4,axis=0),4,axis=1)

# Plot the MISR data

        img = aod_full

#        im = m.pcolormesh(lon,lat,img,shading='flat',cmap=plt.cm.hot_r,latlon=True,
#            vmin=aod_plot_min,vmax=aod_plot_max)

        im = m.pcolormesh(lon_small,lat_small,img,shading='flat',
            cmap=plt.cm.CMRmap_r,latlon=True,vmin=aod_plot_min,vmax=aod_plot_max)

# Plot the AERONET points

        m.scatter(aero_lon,aero_lat,marker='o',c=aero_aod,
            cmap=plt.cm.CMRmap_r,vmin=aod_plot_min,vmax=aod_plot_max,s=25)

# Draw latitude lines

# m.drawparallels(np.arange(4.,13.,1.),labels=[0,1,0,0])

# Add the coastlines

#coast_color = 'blue'
    coast_color = 'green'
    m.drawcoastlines(color=coast_color,linewidth=0.8)

# Add the colorbar

#    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
#        ticks=[0,0.05,0.10,0.15,0.20])
    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
        ticks=np.arange(aod_plot_ticks)*aod_plot_step)
        
    cb.set_label('Green Band AOD')

# Save the figure

    os.chdir(figpath)
    outname = 'AOD_BE_Map'+out_base
    plt.savefig(outname,dpi=120)
    
## MAP THE LOWEST RESIDUAL DATA

# Set the plot area

    fig = plt.figure(figsize=(12,6), dpi=120)

# Set the title

    plt.title('V22 Lowest Residual AOD')

# Draw basemap

    m = Basemap(llcrnrlon=lon_min,llcrnrlat=lat_min,urcrnrlon=lon_max,urcrnrlat=lat_max,
        projection='cyl',resolution='i')
    m.drawmapboundary(fill_color='0.3')

## Loop over blocks

    for i in range(num_block):

        block = start_block + i - 1 # Block 0-based

# Extract the data

        lat = lat_raw[block,:,:]
        lon = lon_raw[block,:,:]
        aod_01 = rlr_aod_v22[block,:,:,1]*1. # Green band
        succ_01 = sf_v22[block,:,:]*1.
        keep = (succ_01 > 0.0)
        print('V22')
        print('Mean AOD = ',np.mean(aod_01[keep]))
        print(' Max AOD = ',np.amax(aod_01[keep]))

# Mask locations without valid retrievals

        mask = (succ_01 < 1.0)
        aod_01[mask] = 0.0
        
# Resize the navigation from 1.1 km to 4.4 km (to match new product)

        lat_small = np.squeeze(lat.reshape([32,4,128,4]).mean(3).mean(1))
        lon_small = np.squeeze(lon.reshape([32,4,128,4]).mean(3).mean(1))

# Resize the data to match the navigation
# Note: The factor is 4 in both dimensions (17.6 -> 4.4 km)

        aod_full = np.repeat(np.repeat(aod_01,4,axis=0),4,axis=1)

# Plot the MISR data

        img = aod_full

#        im = m.pcolormesh(lon,lat,img,shading='flat',cmap=plt.cm.hot_r,latlon=True,
#            vmin=aod_plot_min,vmax=aod_plot_max)

        im = m.pcolormesh(lon_small,lat_small,img,shading='flat',
            cmap=plt.cm.CMRmap_r,latlon=True,vmin=aod_plot_min,vmax=aod_plot_max)

# Plot the AERONET points

        m.scatter(aero_lon,aero_lat,marker='o',c=aero_aod,
            cmap=plt.cm.CMRmap_r,vmin=aod_plot_min,vmax=aod_plot_max,s=25)

# Draw latitude lines

# m.drawparallels(np.arange(4.,13.,1.),labels=[0,1,0,0])

# Add the coastlines

#coast_color = 'blue'
    coast_color = 'green'
    m.drawcoastlines(color=coast_color,linewidth=0.8)

# Add the colorbar

#    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
#        ticks=[0,0.05,0.10,0.15,0.20])
    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
        ticks=np.arange(aod_plot_ticks)*aod_plot_step)
        
    cb.set_label('Green Band AOD')

# Save the figure

    os.chdir(figpath)
    outname = 'AOD_LR_Map'+out_base
    plt.savefig(outname,dpi=120)    

## CALCULATE AND PLOT THE REGRESSIONS AGAINST AERONET

# Extract the full (1.1 km) resolution navigation for all the blocks

    lat_all_full = lat_raw[(start_block-1):(start_block-1+num_block),:,:]
    lon_all_full = lon_raw[(start_block-1):(start_block-1+num_block),:,:]

# Rebin the navigation data to 4.4 km resolution

    lat_4 = np.squeeze(lat_all_full.reshape([num_block,1,32,4,
        128,4]).mean(5).mean(3))
    lon_4 = np.squeeze(lon_all_full.reshape([num_block,1,32,4,
        128,4]).mean(5).mean(3))

# Extract the full (17.6 km) resolution aerosol data for all blocks
        
    rbe_all_full = rbe_aod_v22[(start_block-1):(start_block-1+num_block),:,:,1]*1.
    rlr_all_full = rlr_aod_v22[(start_block-1):(start_block-1+num_block),:,:,1]*1.
    succ_all_full = sf_v22[(start_block-1):(start_block-1+num_block),:,:]*1.

# Rebin the aerosol data to 4.4 km resolution

    rbe_raw_4 = np.repeat(np.repeat(rbe_all_full,4,axis=1),4,axis=2)
    rlr_raw_4 = np.repeat(np.repeat(rlr_all_full,4,axis=1),4,axis=2)
    succ_4 = np.repeat(np.repeat(succ_all_full,4,axis=1),4,axis=2)

# Eliminate locations without a valid retrieval

    rbe_4 = rbe_raw_4 * succ_4
    rlr_4 = rlr_raw_4 * succ_4

# Get the number of AERONET sites
    
    num_aero = len(aero_aod)

# Set up arrays to store the matched data

    aero_match = np.zeros(num_aero)
    rbe_v22_match = np.zeros(num_aero)
    rlr_v22_match = np.zeros(num_aero)
    dist_match = np.zeros(num_aero)

# Loop over AERONET retrievals

    for i in range(num_aero):

# Extract AERONET information
    
        tlat = aero_lat[i]
        tlon = aero_lon[i]
        taod = aero_aod[i]

# Calculate the array of distances (in km) from the AERONET point vs. the MISR data
        
        dist = Haversine_Distance(tlat,tlon,lat_4,lon_4)
        
# Find the minimum distance (km)

        min_dist = np.amin(dist)
    
# Test for a match within a single (4.4 km) MISR pixel

        if(min_dist <= 4.4):

# Extract the match

            match = (dist == min_dist)   
            
# Extract the MISR data

            aero_match[i] = taod
            rbe_v22_match[i] = rbe_4[match]
            rlr_v22_match[i] = rlr_4[match]
            dist_match[i] = min_dist

# Plot the data

    max_val = aod_plot_max

# Set the plot area
# NOTE: The base plot size is 6 x 6, so a 2 row, 3 column set would be 18 x 12

    plt.figure(figsize=(12,6), dpi=120)

## Linear plot (Best Estimate)

    good = (rbe_v22_match > 0)
    ref_aod = aero_match[good]
    test_aod = rbe_v22_match[good]

    plt.subplot(1, 2, 1)
    plt.scatter(ref_aod,test_aod,marker='o',color='black',s=25)   
    plt.title("V22 Best Estimate")

# Plot the one-to-one line

    plt.plot([0.0,max_val], [0.0,max_val], color="k", lw=1)

# Plot the envelopes

    dummy_aod = np.logspace(-4,1,num=100)
    up1_aod = 1.20*dummy_aod
    up2_aod = dummy_aod+0.05
    upper_aod = np.maximum(up1_aod,up2_aod)

    lo1_aod = 0.80*dummy_aod
    lo2_aod = dummy_aod-0.05
    lower_aod = np.minimum(lo1_aod,lo2_aod)

    plt.plot(dummy_aod,lower_aod,color="0.75", lw=1)
    plt.plot(dummy_aod,upper_aod,color="0.75", lw=1)

# Set the limits and axis labels

    plt.xlim(0.0,max_val)
    plt.ylim(0.0,max_val)

    plt.xlabel('AERONET AOD')
    plt.ylabel('MISR AOD')

    plt.grid(True)

# Include some text on the Best Estimate Figure

    x_pos = 0.19

    plt.text(x_pos,0.08,'Best Estimate',fontsize=12) # Version

    count = len(test_aod)
    out_text = 'N = '+str(count)
    plt.text(x_pos,0.07,out_text,fontsize=10) # Count

    temp = np.corrcoef(ref_aod,test_aod)
    be_r = temp[0,1]
    out_text = 'r = '+"{0:.4f}".format(be_r)
    plt.text(x_pos,0.06,out_text,fontsize=10) # Correlation coefficient

    rmse = np.sqrt(((test_aod - ref_aod) ** 2).mean())
    out_text = 'RMSE = '+"{0:.4f}".format(rmse)
    plt.text(x_pos,0.05,out_text,fontsize=10) # Root mean squared error

    diff = test_aod - ref_aod
    bias = np.mean(diff)
    out_text = 'Bias = '+"{0:.4f}".format(bias)
    plt.text(x_pos,0.04,out_text,fontsize=10) # Bias

    offset = np.ones_like(ref_aod)*0.05
    inner = np.absolute(diff) < np.maximum(offset,ref_aod*0.2)
    in_frac = (np.sum(inner)/(1.0*count))*100.0
    out_text = 'Percent In = '+"{0:.2f}".format(in_frac)
    plt.text(x_pos,0.03,out_text,fontsize=10) # Percent in envelope

## Linear plot (Lowest Residual)

    good = (rlr_v22_match > 0)
    ref_aod = aero_match[good]
    test_aod = rlr_v22_match[good]

    plt.subplot(1, 2, 2)
    plt.scatter(ref_aod,test_aod,marker='o',color='black',s=25)   
    plt.title("V22 Lowest Residual")

# Plot the one-to-one line

    plt.plot([0.0,max_val], [0.0,max_val], color="k", lw=1)

# Plot the envelopes

    dummy_aod = np.logspace(-4,1,num=100)
    up1_aod = 1.20*dummy_aod
    up2_aod = dummy_aod+0.05
    upper_aod = np.maximum(up1_aod,up2_aod)

    lo1_aod = 0.80*dummy_aod
    lo2_aod = dummy_aod-0.05
    lower_aod = np.minimum(lo1_aod,lo2_aod)

    plt.plot(dummy_aod,lower_aod,color="0.75", lw=1)
    plt.plot(dummy_aod,upper_aod,color="0.75", lw=1)

# Set the limits and axis labels

    plt.xlim(0.0,max_val)
    plt.ylim(0.0,max_val)

    plt.xlabel('AERONET AOD')
    plt.ylabel('MISR AOD')

    plt.grid(True)

# Include some text on the Lowest Residual Figure

    x_pos = 0.19

    plt.text(x_pos,0.08,'Lowest Resid',fontsize=12) # Version

    count = len(test_aod)
    out_text = 'N = '+str(count)
    plt.text(x_pos,0.07,out_text,fontsize=10) # Count

    temp = np.corrcoef(ref_aod,test_aod)
    be_r = temp[0,1]
    out_text = 'r = '+"{0:.4f}".format(be_r)
    plt.text(x_pos,0.06,out_text,fontsize=10) # Correlation coefficient

    rmse = np.sqrt(((test_aod - ref_aod) ** 2).mean())
    out_text = 'RMSE = '+"{0:.4f}".format(rmse)
    plt.text(x_pos,0.05,out_text,fontsize=10) # Root mean squared error

    diff = test_aod - ref_aod
    bias = np.mean(diff)
    out_text = 'Bias = '+"{0:.4f}".format(bias)
    plt.text(x_pos,0.04,out_text,fontsize=10) # Bias

    offset = np.ones_like(ref_aod)*0.05
    inner = np.absolute(diff) < np.maximum(offset,ref_aod*0.2)
    in_frac = (np.sum(inner)/(1.0*count))*100.0
    out_text = 'Percent In = '+"{0:.2f}".format(in_frac)
    plt.text(x_pos,0.03,out_text,fontsize=10) # Percent in envelope

# Save the figure

    os.chdir(figpath)
    outname = 'AOD_Regression'+out_base
    plt.savefig(outname,dpi=120)

# Show the plot

    plt.show()
    
# Print the time

    all_end_time = time.time()
    print("Total elapsed time was %g seconds" % (all_end_time - all_start_time))

# Tell user completion was successful

    print("\nSuccessful Completion\n")
def main():  # Main code

# Set the file exist (0 to start, 1 after initial run)
# NOTE: The NetCDF filename is hardcoded

#    exist = 0
    exist = 1

# Set the overall timer

    all_start_time = time.time()

# Set the pixel sizes for 4.4 km data
# Note: The MISR base (1.1 km) resolution is 512-x by 128-y

#    x_size = 512
#    y_size = 128
    
    x_44 = 128
    y_44 = 32    

# Set the time window

    time_window = 30  #  Time in minutes     

# Set the minimum and maximum AOD for plotting

    aod_plot_min = 0.00
    
#    aod_plot_max = 0.30
#    aod_plot_max = 0.60
    aod_plot_max = 1.40
    
#    aod_plot_ticks = 7 # Usually 1 more than you think
    aod_plot_ticks = 8 # Usually 1 more than you think
    
#    aod_plot_step = 0.05
#    aod_plot_step = 0.1
    aod_plot_step = 0.2

# Set the paths

    agppath = '/Volumes/ChoOyu/DATA/AGP/'
    aeropath = '/Volumes/ChoOyu/DATA/'
    datapath = '/Volumes/ChoOyu/DATA/AERONET/'
    figpath = '/Users/mgaray/Desktop/CODING/PYTHON/PY27/JUN15/AEROSOL/FIGS/'

# Set the MISR product

#    misr_name = '22b24-10e' # New Product
#    misr_name = '22b24-26' # New Product
#    misr_name = '22b24-26+1' # New Product
#    misr_name = '22b24-26+2' # New Product
#    misr_name = '22b24-27+3' # New Product
#    misr_name = '22b24-29' # New Product
#    misr_name = '22b24-29+1' # New Product
#    misr_name = '22b24-29+3' # New Product
#    misr_name = '22b24-34+0' # New Product
    misr_name = '22b24-34+1' # New Product
#    misr_name = '22b24-34+2' # New Product
#    misr_name = '22b24-34+3' # New Product

# Set the output file

    outfile = 'MISR_AERONET_DRAGON_V22b24-34+1_04.nc'

# Set the MISR orbit, path, and date information to test
# NOTE: Although this is redundant, it provides a check so that the correct
#       data are analyzed.

#    misr_orbit = '60934'
#    misr_orbit = '61633'
#    misr_orbit = '61662'
#    misr_orbit = '65440'
#    misr_orbit = '65731'
#    misr_orbit = '65775'
#    misr_orbit = '65906'
#    misr_orbit = '66139'
#    misr_orbit = '69644'
    misr_orbit = '69877'

#    misr_path = 'P016'
#    misr_path = 'P016'    
#    misr_path = 'P014'
#    misr_path = 'P115'
#    misr_path = 'P111'
#    misr_path = 'P116'
#    misr_path = 'P115'
#    misr_path = 'P115'
#    misr_path = 'P042'
    misr_path = 'P042'
    
#    misr_date = '2011_06_02'
#    misr_date = '2011_07_20'
#    misr_date = '2011_07_22'
#    misr_date = '2012_04_07'
#    misr_date = '2012_04_27'
#    misr_date = '2012_04_30'
#    misr_date = '2012_05_09'
#    misr_date = '2012_05_25'
#    misr_date = '2013_01_20'
    misr_date = '2013_02_05'

    start_block = 60 # MISR start block (1-based)
    end_block = 63 # MISR end block (1-based)

# Update the aerosol path

    aeropath = aeropath+misr_date+'/MISR/'

# Set the base file name

    num_block = end_block - start_block + 1 # Inclusive
    out_base = '_O'+misr_orbit+'_{}'.format(start_block)
    out_base = out_base+'_{}'.format(num_block)+'_'+misr_name+'_DRAGON_04.png'
            
### Read the aerosol data (New-Style)

# Start the timer

    start_time = time.time()

# Change directory to the basepath

    os.chdir(aeropath)

### Get the MISR Aerosol File

    search_str = 'MISR_AM1_AS_AEROSOL_USER_*'+misr_orbit+'*'+misr_name+'.'+'*.hdf'

    file_list = glob.glob(search_str)
    
# Choose the correct filename

    inputName = file_list[0]

# Tell user location in process

    print("Reading: "+inputName)

# Open the file

    hdf = SD(inputName, SDC.READ)

# Read the data fields

    var01 = hdf.select('Latitude')
    var02 = hdf.select('Longitude')
    var03 = hdf.select('TAI_Time')
    var04 = hdf.select('Land_Water_Retrieval_Type_Flag')
    var05 = hdf.select('Retrieval_Success_Flag')
    var06 = hdf.select('Aerosol_Optical_Depth_555')
    var07 = hdf.select('Aerosol_Optical_Depth_Per_Mixture')
    var08 = hdf.select('Chisq_Per_Mixture')

    lat_raw = var01.get()
    lon_raw = var02.get()
    tai_raw = var03.get()
    alg_type = var04.get()
    succ_flag = var05.get()
    rbe_aod = var06.get()
    aod_per_mix = var07.get()
    chisq_per_mix = var08.get()

# Close the file

    hdf.end()    

# Print the time

    end_time = time.time()
    print("Time to Read Aerosol data was %g seconds" % (end_time - start_time))

# Get the dimensions of the aerosol data

    b_aero = rbe_aod.shape[0]
    y_aero = rbe_aod.shape[1]
    x_aero = rbe_aod.shape[2]

# Process the success flag as a mask (if successful set success to 1, otherwise 0)

    success = np.copy(succ_flag)
    success[succ_flag != 1] = 0

# Extract the navigation information to get corners for the entire image
# Note: Because the MISR block numbers are 1-based but the array indices are 0-based,
#       the initial block should be start_block-1.  Because the number of blocks is
#       inclusive, the end block must be start_block-1 + num_block-1, otherwise an
#       additional block will be extracted.

    lat = lat_raw[start_block-1,:,:]
    lon = lon_raw[start_block-1,:,:]
    lat_max = np.amax(lat)
    lon_max = np.amax(lon)

    lat = lat_raw[start_block-1+num_block-1,:,:]
    lon = lon_raw[start_block-1+num_block-1,:,:]
    lat_min = np.amin(lat)
    lon_min = np.amin(lon)    
    
### Get the MISR time information
# Note: Because the MISR block numbers are 1-based but the array indices are 0-based,
#       the initial block should be start_block-1.  Because the number of blocks is
#       inclusive, the end block must be start_block-1 + num_block-1, otherwise an
#       additional block will be extracted.

# Start block

    bct_block = tai_raw[start_block-1]
    bct_raw = bct_block[16,64]  # Choose the central pixel in the block (arbitrary)

# The epoch defined by GPS in the astropy library and the one MISR uses for TAI
# are different, so calculate the offset and add it to the time    
    
    t_gps = Time('1980-01-06 00:00:00')
    t_tai = Time('1993-01-01 00:00:00')
    dt = t_tai - t_gps

# Convert the TAI time into the astropy format
    
    t = Time(bct_raw, format='gps', scale='tai')
    t2 = t + dt

# Output the result
# NOTE: The correct format is utc.iso determined by comparing to the old
#       BlockCenterTime
    
    print(t2.utc.iso)
    
    bct = t2.utc.iso # Similar format to MISR BlockCenterTime
    
    words = bct.split(' ')
    temp = words[0].split('-')
    myear_str = temp[0]
    myear = int(temp[0])
    mmonth = int(temp[1])
    mday = int(temp[2])

    temp = words[1].split(':')
    mhour = int(temp[0])
    mminute = int(temp[1])
    hold = temp[2].split('.')
    msecond = int(hold[0])  #  Note: Not rounding here to nearest second
    
    mhours = float(mhour) + mminute/60. + msecond/3600.
    
    mtime1 = mmonth*10000. + mday*100. + mhours # Eliminate years

# End block
# NOTE: To get agreement with the V22 code, need num_block-2, which suggests that
#       the V22 code is incorrect.

    bct_block = tai_raw[start_block-1 + num_block-2]
    bct_raw = bct_block[16,64]  # Choose the central pixel in the block (arbitrary)

# Convert the TAI time into the astropy format
    
    t = Time(bct_raw, format='gps', scale='tai')
    t2 = t + dt

# Output the result
# NOTE: The correct format is utc.iso determined by comparing to the old
#       BlockCenterTime
    
    print(t2.utc.iso)
    
    bct = t2.utc.iso # Similar format to MISR BlockCenterTime
    
    words = bct.split(' ')
    temp = words[0].split('-')
    myear = int(temp[0])
    mmonth = int(temp[1])
    mday = int(temp[2])

    temp = words[1].split(':')
    mhour = int(temp[0])
    mminute = int(temp[1])
    hold = temp[2].split('.')
    msecond = int(hold[0])  #  Note: Not rounding here to nearest second
    
    mhours = float(mhour) + mminute/60. + msecond/3600.
    
    mtime2 = mmonth*10000. + mday*100. + mhours # Eliminate years

# Calculate the average time
    
    mtime = (mtime1 + mtime2)/2.0 # Calculate average time

# Calculate in time window units    
    
    munits = mtime * (60./time_window) 
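
# Worked example of the unit scheme above: 2013-02-05 18:30 UTC gives
# mtime = 2*10000. + 5*100. + 18.5 = 20518.5, and with time_window = 30
# the factor 60./time_window = 2 means a difference of 1.0 in these units
# corresponds to a 30-minute separation (see the test tdiff <= 1.0 below).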

### Find and read the correct AERONET NetCDF file

# Set the AERONET filename

    aero_name = "AERONET_"+myear_str+".nc"
    
### READ THE AERONET DATA

    start_time = time.time()

# Change directory to the basepath and get the file list

    os.chdir(datapath)
    file_list = glob.glob(aero_name)

# Choose the first file

    inputName = file_list[0]

# Tell user location in process

    print('Reading: ',inputName)

# Open the NetCDF file

    rootgrp = Dataset(inputName, 'r', format='NETCDF4')

# Assign the variables to an array

    site_name = rootgrp.variables['Site'][:]
    lon = rootgrp.variables['Longitude'][:]
    lat = rootgrp.variables['Latitude'][:]

    year = rootgrp.variables['Year'][:]
    month = rootgrp.variables['Month'][:]
    day = rootgrp.variables['Day'][:]

    hour = rootgrp.variables['Hour'][:]
    minute = rootgrp.variables['Minute'][:]
    second = rootgrp.variables['Second'][:]

    green_poly = rootgrp.variables['MISR_Green_Poly'][:]
    green_line = rootgrp.variables['MISR_Green_Line'][:]

# Close the NetCDF file

    rootgrp.close()

# Print the time

    end_time = time.time()
    print("Time to Read AERONET data was %g seconds" % (end_time - start_time))

### Convert the AERONET times into time units
### Note: This is a fast approach that does not handle day boundaries correctly

    start_time = time.time()

# Convert to hours

    ahours = hour*1. + minute/60. + second/3600.
    atime = month*10000. + day*100. + ahours # Eliminate years

# Convert to time units

    aunits = atime * (60./time_window)

# Print the time

    end_time = time.time()
    print("Time to Convert AERONET data was %g seconds" % (end_time - start_time))

### Match all the data within the image boundary within the time window

    tdiff = abs(aunits - munits)
    
# Extract those within the window (unit value)
    
    found = (tdiff <= 1.0)
    if np.any(found):
        aunits_found = aunits[found]
        site_found = site_name[found]
        lon_found = lon[found]
        lat_found = lat[found]
        poly_found = green_poly[found]
        line_found = green_line[found]
        tdiff_found = tdiff[found]
        
# Find the data within the window (lat/lon)

        t1 = (lon_found <= lon_max)
        t2 = (lon_found >= lon_min)
        t3 = (lat_found <= lat_max)
        t4 = (lat_found >= lat_min)
        
        t12 = np.logical_and(t1,t2)
        t34 = np.logical_and(t3,t4)
        check = np.logical_and(t12,t34)
        
        if np.any(check):

# Extract the data within the window (block lat/lon)
            
            keep = check
            
            site_near = site_found[keep]
            lon_near = lon_found[keep]
            lat_near = lat_found[keep]
            poly_near = poly_found[keep]
            line_near = line_found[keep] 
            tdiff_near = tdiff_found[keep]      

# Get the indices of the unique AERONET locations
# Note: The AERONET stations do not move, so longitude is enough to identify a site

            unique_lon, indices = np.unique(lon_near, return_index=True)

# Set up arrays to store the matched data

            num_aero = len(indices)

            aero_lat = np.zeros(num_aero)
            aero_lon = np.zeros(num_aero)
            aero_aod = np.zeros(num_aero)
            aero_line = np.zeros(num_aero)
            aero_tdiff = np.zeros(num_aero)
            
            aero_count = 0

# Loop over the unique locations

            for inner in indices:
                
                test_site = site_near[inner]
                test = (site_near == test_site)
                test_lat = lat_near[test]
                test_lon = lon_near[test]
                test_aod = poly_near[test]
                test_line = line_near[test]
                test_tdiff = tdiff_near[test]
                
                min_time = np.amin(test_tdiff)
                match = (test_tdiff == min_time)
                
                aero_lat[aero_count] = test_lat[match]
                aero_lon[aero_count] = test_lon[match]
                aero_aod[aero_count] = test_aod[match]
                aero_line[aero_count] = test_line[match]
                aero_tdiff[aero_count] = min_time
                
                aero_count = aero_count+1
                
# Print some results before plotting
# NOTE: Assumes at least one AERONET match was found above (otherwise aero_count
#       and aero_aod are undefined)

    print()
    print("Number AERONET Sites = ",aero_count)
    out_text = "MAX AERONET AOD = "+"{0:.4f}".format(np.amax(aero_aod))    
    print(out_text)
    print()
        
## MAP THE BEST ESTIMATE DATA

# Set the plot area

    fig = plt.figure(figsize=(12,6), dpi=120)

# Set the title

    plt.title('V'+misr_name+' Best Estimate AOD')
    
# Draw basemap with the appropriate lat/lon boundaries

    m = Basemap(llcrnrlon=lon_min,llcrnrlat=lat_min,urcrnrlon=lon_max,urcrnrlat=lat_max,
        projection='cyl',resolution='i')
    m.drawmapboundary(fill_color='0.3')

## Loop over blocks

    print()
    print("***Best Estimate***")

    for i in range(num_block):

        block = start_block + i - 1 # Block 0-based

# Extract the data for this block

        lat_44 = lat_raw[block,:,:]
        lon_44 = lon_raw[block,:,:]
        aod_01 = rbe_aod[block,:,:]*1.
        succ_01 = success[block,:,:]*1.
        keep = (succ_01 > 0.0)
 
# Test for a successful search        
        
        if not np.any(keep):
            continue 
        
# Print some information

        print("Block = ",block+1) # MISR-format
        out_text = "   Min AOD = "+"{0:.4f}".format(np.amin(aod_01[keep])) 
        print(out_text)    
        out_text = "Median AOD = "+"{0:.4f}".format(np.median(aod_01[keep]))    
        print(out_text)
        out_text = "  Mean AOD = "+"{0:.4f}".format(np.mean(aod_01[keep]))    
        print(out_text)
        out_text = "   Max AOD = "+"{0:.4f}".format(np.amax(aod_01[keep])) 
        print(out_text)

# Mask locations without valid retrievals

        mask = (succ_01 < 1.0)
        aod_01[mask] = 0.0
            
# Spatial statistics
# Note: First, we resize the arrays from their native resolution to 4 x 4 coarser
#       resolution by summing
#       Then, we calculate the mean AOD value at the coarser resolution using the number
#       of valid retrievals
#       Next, we resize the arrays from the coarser resolution back to their native
#       resolution

        succ_coarse = np.squeeze(succ_01.reshape([y_aero//4,4,x_aero//4,4]).sum(3).sum(1))
        aod_coarse = np.squeeze(aod_01.reshape([y_aero//4,4,x_aero//4,4]).sum(3).sum(1))
        
        denom = np.copy(succ_coarse)  # copy so the raw counts survive for the sampling test below
        denom[succ_coarse < 1.0] = 1.0  # avoid dividing by zero where nothing was retrieved
        mean_aod_coarse = aod_coarse/denom
        
        mean_aod_fine = np.repeat(np.repeat(mean_aod_coarse,4,axis=0),
            4,axis=1)
        
# Calculate the relative sampling of the original resolution data to the coarse
# resolution data
        
        keep = (succ_coarse > 1.0)
        rel_sample = (np.mean(succ_coarse[keep])/16.0) # Denominator is from 4 x 4
        out_text = " Relative Sampling (0.0 to 1.0) = "+"{0:.4f}".format(rel_sample) 
        print(out_text)
        
# Calculate the correlation between the new fine array and the original array

        aod_01_flat = aod_01.flatten() # 1-D array
        mean_aod_fine_flat = mean_aod_fine.flatten() # 1-D array
        
        temp = np.corrcoef(aod_01_flat,mean_aod_fine_flat)
        rel_corr = temp[0,1]
        
        out_text = "Relative Correlation (-1 to +1) = "+"{0:.4f}".format(rel_corr) 
        print(out_text)
                
# Plot the data

        img = aod_01

        im = m.pcolormesh(lon_44,lat_44,img,shading='flat',
            cmap=plt.cm.CMRmap_r,latlon=True,vmin=aod_plot_min,vmax=aod_plot_max)

# Plot the AERONET points

        m.scatter(aero_lon,aero_lat,marker='o',c=aero_aod,
            cmap=plt.cm.CMRmap_r,vmin=aod_plot_min,vmax=aod_plot_max,s=25)

# Add the coastlines

#coast_color = 'blue'
    coast_color = 'green'
    m.drawcoastlines(color=coast_color,linewidth=0.8)

# Add the colorbar

#    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
#        ticks=[0,0.05,0.10,0.15,0.20])
    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
        ticks=np.arange(aod_plot_ticks)*aod_plot_step)
        
    cb.set_label('Green Band AOD')

# Save the figure

    os.chdir(figpath)
    outname = 'AOD_BE_Map'+out_base
    plt.savefig(outname,dpi=120)
        
## MAP THE LOWEST RESIDUAL DATA

# Set the plot area

    fig = plt.figure(figsize=(12,6), dpi=120)

# Set the title

    plt.title('V'+misr_name+' Lowest Residual AOD')

# Draw basemap

    m = Basemap(llcrnrlon=lon_min,llcrnrlat=lat_min,urcrnrlon=lon_max,urcrnrlat=lat_max,
        projection='cyl',resolution='i')
    m.drawmapboundary(fill_color='0.3')
    
## Loop over blocks

    print()
    print("***Lowest Residual***")

    for i in range(num_block):

        block = start_block + i - 1 # Block 0-based

# Extract the data

        lat_44 = lat_raw[block,:,:]
        lon_44 = lon_raw[block,:,:]
        aod_01 = rbe_aod[block,:,:]*1.
        succ_01 = success[block,:,:]*1.
        mix_aod = aod_per_mix[block,:,:,:]*1. # Additional mixture dimension
        mix_chisq = chisq_per_mix[block,:,:,:]*1.  # Additional mixture dimension
        
        keep = (succ_01 > 0.0)
        
# Test for a successful search        
        
        if not np.any(keep):
            continue         
    
        new_chisq_01 = np.zeros_like(aod_01)
        new_aod_01 = np.zeros_like(aod_01)

# Loop through the block and find the location with the lowest (valid) chi-squared
# value and store the chi-squared value and associated AOD
    
        for y in range(y_aero):
            for x in range(x_aero):
                hold = mix_chisq[y,x,:]
                test = np.copy(hold)
                if(np.amax(test) > 0.0):
                    test[hold < 0.0] = 9999.9
                    check = np.argmin(test)
                    new_chisq_01[y,x] = np.amin(test)
                    temp = mix_aod[y,x,:]
                    new_aod_01[y,x] = temp[check]

# Print some information

        print("Block = ",block+1) # MISR-format
        out_text = "   Min AOD = "+"{0:.4f}".format(np.amin(new_aod_01[keep])) 
        print(out_text)  
        out_text = "Median AOD = "+"{0:.4f}".format(np.median(new_aod_01[keep]))    
        print(out_text)
        out_text = "  Mean AOD = "+"{0:.4f}".format(np.mean(new_aod_01[keep]))    
        print(out_text)
        out_text = "   Max AOD = "+"{0:.4f}".format(np.amax(new_aod_01[keep])) 
        print(out_text)

# Mask locations without valid retrievals

        mask = (succ_01 < 1.0)
        new_aod_01[mask] = 0.0

# Spatial statistics
# Note: First, we resize the arrays from their native resolution to 4 x 4 coarser
#       resolution by summing
#       Then, we calculate the mean AOD value at the coarser resolution using the number
#       of valid retrievals
#       Next, we resize the arrays from the coarser resolution back to their native
#       resolution

        succ_coarse = np.squeeze(succ_01.reshape([y_aero//4,4,x_aero//4,4]).sum(3).sum(1))
        aod_coarse = np.squeeze(new_aod_01.reshape([y_aero//4,4,x_aero//4,4]).sum(3).sum(1))
        
        denom = np.copy(succ_coarse)  # copy so the raw counts survive for the sampling test below
        denom[succ_coarse < 1.0] = 1.0  # avoid dividing by zero where nothing was retrieved
        mean_aod_coarse = aod_coarse/denom
        
        mean_aod_fine = np.repeat(np.repeat(mean_aod_coarse,4,axis=0),
            4,axis=1)
        
# Calculate the relative sampling of the original resolution data to the coarse
# resolution data
        
        keep = (succ_coarse > 1.0)
        rel_sample = (np.mean(succ_coarse[keep])/16.0) # Denominator is from 4 x 4
        out_text = " Relative Sampling (0.0 to 1.0) = "+"{0:.4f}".format(rel_sample) 
        print(out_text)
        
# Calculate the correlation between the new fine array and the original array

        aod_01_flat = new_aod_01.flatten() # 1-D array
        mean_aod_fine_flat = mean_aod_fine.flatten() # 1-D array
        
        temp = np.corrcoef(aod_01_flat,mean_aod_fine_flat)
        rel_corr = temp[0,1]
        
        out_text = "Relative Correlation (-1 to +1) = "+"{0:.4f}".format(rel_corr) 
        print(out_text)  
            
# Plot the data

        img = new_aod_01

        im = m.pcolormesh(lon_44,lat_44,img,shading='flat',
            cmap=plt.cm.CMRmap_r,latlon=True,vmin=aod_plot_min,vmax=aod_plot_max)

# Plot the AERONET points

        m.scatter(aero_lon,aero_lat,marker='o',c=aero_aod,
            cmap=plt.cm.CMRmap_r,vmin=aod_plot_min,vmax=aod_plot_max,s=25)

# Add the coastlines

#coast_color = 'blue'
    coast_color = 'green'
    m.drawcoastlines(color=coast_color,linewidth=0.8)

# Add the colorbar

#    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
#        ticks=[0,0.05,0.10,0.15,0.20])
    cb = m.colorbar(im,"bottom", size="10%", pad="5%", 
        ticks=np.arange(aod_plot_ticks)*aod_plot_step)
        
    cb.set_label('Green Band AOD')

# Save the figure

    os.chdir(figpath)
    outname = 'AOD_LR_Map'+out_base
    plt.savefig(outname,dpi=120)  
    
## CALCULATE AND PLOT THE REGRESSIONS AGAINST AERONET

# Extract the navigation for all the blocks

    lat_44 = lat_raw[(start_block-1):(start_block-1+num_block),:,:]
    lon_44 = lon_raw[(start_block-1):(start_block-1+num_block),:,:]
    
# Extract the aerosol data for all the blocks

    rbe_raw_44 = rbe_aod[(start_block-1):(start_block-1+num_block),:,:]*1.
    succ_44 = success[(start_block-1):(start_block-1+num_block),:,:]*1.
    mix_aod = aod_per_mix[(start_block-1):(start_block-1+num_block),:,:,:]*1.
    mix_chisq = chisq_per_mix[(start_block-1):(start_block-1+num_block),:,:,:]*1.

# Loop through the blocks and find the location with the lowest (valid) chi-squared
# value and store the chi-squared value and associated AOD
    
    rlr_raw_44 = np.zeros_like(rbe_raw_44)
    rlr_chi2_44 = np.zeros_like(rbe_raw_44)
    rlr_mix_44 = np.zeros_like(rbe_raw_44)
    
    for b in range(num_block):
        for y in range(y_aero):
            for x in range(x_aero):
                hold = mix_chisq[b,y,x,:]
                test = np.copy(hold)
                if(np.amax(test) > 0.0):
                    test[hold < 0.0] = 9999.9
                    check = np.argmin(test)
                    temp = mix_aod[b,y,x,:]
                    rlr_raw_44[b,y,x] = temp[check]
                    rlr_chi2_44[b,y,x] = np.amin(test)
                    rlr_mix_44[b,y,x] = check+1  # Mixture index is 1-based

# Eliminate locations without a valid retrieval
# Note: These arrays were accumulated in the Lowest Residual processing step

    rbe_44 = rbe_raw_44 * succ_44
    rlr_44 = rlr_raw_44 * succ_44
    chi2_44 = rlr_chi2_44 * succ_44
    mix_44 = rlr_mix_44 * succ_44

# Get the number of AERONET sites
    
    num_aero = len(aero_aod)

# Set up arrays to store the matched data

    aero_match = np.zeros(num_aero)
    rbe_v22_match = np.zeros(num_aero)
    rlr_v22_match = np.zeros(num_aero)
    rlr_chi2_match = np.zeros(num_aero)
    rlr_mix_match = np.zeros(num_aero)
    dist_match = np.zeros(num_aero)
    time_match = np.zeros(num_aero)
    lon_match = np.zeros(num_aero)
    lat_match = np.zeros(num_aero)

# Loop over AERONET retrievals

    for i in range(num_aero):

# Extract AERONET information
    
        tlat = aero_lat[i]
        tlon = aero_lon[i]
        taod = aero_aod[i]
        ttim = aero_tdiff[i]

# Calculate the array of distances (in km) from the AERONET point vs. the MISR data
        
        dist = Haversine_Distance(tlat,tlon,lat_44,lon_44)
        
# Find the minimum distance (km)

        min_dist = np.amin(dist)
    
# Test for a match within a single (4.4 km) MISR pixel

        if(min_dist <= 4.4):

# Extract the match

            match = (dist == min_dist)   
            
# Extract the MISR data

            aero_match[i] = taod
            rbe_v22_match[i] = rbe_44[match]
            rlr_v22_match[i] = rlr_44[match]
            rlr_chi2_match[i] = chi2_44[match]
            rlr_mix_match[i] = mix_44[match]
            dist_match[i] = min_dist
            time_match[i] = ttim
            lat_match[i] = tlat
            lon_match[i] = tlon

# Plot the data

    max_val = aod_plot_max

# Set the plot area
# NOTE: The base plot size is 6 x 6, so a 2 row, 3 column set would be 18 x 12

    plt.figure(figsize=(12,6), dpi=120)

## Linear plot (Best Estimate)

    good = (rbe_v22_match > 0)
    ref_aod = aero_match[good]
    test_aod = rbe_v22_match[good]

    plt.subplot(1, 2, 1)
    plt.scatter(ref_aod,test_aod,marker='o',color='black',s=25)   
    plt.title('V'+misr_name+' Best Estimate')

# Plot the one-to-one line

    plt.plot([0.0,max_val], [0.0,max_val], color="k", lw=1)

# Plot the envelopes

    dummy_aod = np.logspace(-4,1,num=100)
    up1_aod = 1.20*dummy_aod
    up2_aod = dummy_aod+0.05
    upper_aod = np.maximum(up1_aod,up2_aod)

    lo1_aod = 0.80*dummy_aod
    lo2_aod = dummy_aod-0.05
    lower_aod = np.minimum(lo1_aod,lo2_aod)

    plt.plot(dummy_aod,lower_aod,color="0.75", lw=1)
    plt.plot(dummy_aod,upper_aod,color="0.75", lw=1)

# Set the limits and axis labels

    plt.xlim(0.0,max_val)
    plt.ylim(0.0,max_val)

    plt.xlabel('AERONET AOD')
    plt.ylabel('MISR AOD')

    plt.grid(True)

# Include some text on the Best Estimate Figure

    x_pos = (aod_plot_max/2.) + (aod_plot_max/10.)
    y_pos1 = (aod_plot_max/10.)
    y_pos2 = y_pos1 + (aod_plot_max/30.)
    y_pos3 = y_pos2 + (aod_plot_max/30.)
    y_pos4 = y_pos3 + (aod_plot_max/30.)
    y_pos5 = y_pos4 + (aod_plot_max/30.)
    y_pos6 = y_pos5 + (aod_plot_max/30.)
    
    plt.text(x_pos,y_pos6,'Best Estimate',fontsize=12) # Version

    count = len(test_aod)
    out_text = 'N = '+str(count)
    plt.text(x_pos,y_pos5,out_text,fontsize=10) # Count

    temp = np.corrcoef(ref_aod,test_aod)
    be_r = temp[0,1]
    out_text = 'r = '+"{0:.4f}".format(be_r)
    plt.text(x_pos,y_pos4,out_text,fontsize=10) # Correlation coefficient

    rmse = np.sqrt(((test_aod - ref_aod) ** 2).mean())
    out_text = 'RMSE = '+"{0:.4f}".format(rmse)
    plt.text(x_pos,y_pos3,out_text,fontsize=10) # Root mean squared error

    diff = test_aod - ref_aod
    bias = np.mean(diff)
    out_text = 'Bias = '+"{0:.4f}".format(bias)
    plt.text(x_pos,y_pos2,out_text,fontsize=10) # Bias

    offset = np.ones_like(ref_aod)*0.05
    inner = np.absolute(diff) < np.maximum(offset,ref_aod*0.2)
    in_frac = (np.sum(inner)/(1.0*count))*100.0
    out_text = 'Percent In = '+"{0:.2f}".format(in_frac)
    plt.text(x_pos,y_pos1,out_text,fontsize=10) # Percent in envelope

## Linear plot (Lowest Residual)

    good = (rlr_v22_match > 0)
    ref_aod = aero_match[good]
    test_aod = rlr_v22_match[good]

    plt.subplot(1, 2, 2)
    plt.scatter(ref_aod,test_aod,marker='o',color='black',s=25)   
    plt.title('V'+misr_name+' Lowest Residual')

# Plot the one-to-one line

    plt.plot([0.0,max_val], [0.0,max_val], color="k", lw=1)

# Plot the envelopes

    dummy_aod = np.logspace(-4,1,num=100)
    up1_aod = 1.20*dummy_aod
    up2_aod = dummy_aod+0.05
    upper_aod = np.maximum(up1_aod,up2_aod)

    lo1_aod = 0.80*dummy_aod
    lo2_aod = dummy_aod-0.05
    lower_aod = np.minimum(lo1_aod,lo2_aod)

    plt.plot(dummy_aod,lower_aod,color="0.75", lw=1)
    plt.plot(dummy_aod,upper_aod,color="0.75", lw=1)

# Set the limits and axis labels

    plt.xlim(0.0,max_val)
    plt.ylim(0.0,max_val)

    plt.xlabel('AERONET AOD')
    plt.ylabel('MISR AOD')

    plt.grid(True)

# Include some text on the Lowest Residual Figure

    plt.text(x_pos,y_pos6,'Lowest Resid',fontsize=12) # Version

    count = len(test_aod)
    out_text = 'N = '+str(count)
    plt.text(x_pos,y_pos5,out_text,fontsize=10) # Count

    temp = np.corrcoef(ref_aod,test_aod)
    be_r = temp[0,1]
    out_text = 'r = '+"{0:.4f}".format(be_r)
    plt.text(x_pos,y_pos4,out_text,fontsize=10) # Correlation coefficient

    rmse = np.sqrt(((test_aod - ref_aod) ** 2).mean())
    out_text = 'RMSE = '+"{0:.4f}".format(rmse)
    plt.text(x_pos,y_pos3,out_text,fontsize=10) # Root mean squared error

    diff = test_aod - ref_aod
    bias = np.mean(diff)
    out_text = 'Bias = '+"{0:.4f}".format(bias)
    plt.text(x_pos,y_pos2,out_text,fontsize=10) # Bias

    offset = np.ones_like(ref_aod)*0.05
    inner = np.absolute(diff) < np.maximum(offset,ref_aod*0.2)
    in_frac = (np.sum(inner)/(1.0*count))*100.0
    out_text = 'Percent In = '+"{0:.2f}".format(in_frac)
    plt.text(x_pos,y_pos1,out_text,fontsize=10) # Percent in envelope

# Save the figure

    os.chdir(figpath)
    outname = 'AOD_Regression'+out_base
    plt.savefig(outname,dpi=120)

# Show the plot

    plt.show()
#    print(error)

### Write out the data to a NetCDF file

# Extract the data

    good = (rlr_v22_match > 0)
    lat_good = lat_match[good]
    lon_good = lon_match[good]
    aero_good = aero_match[good]
    rbe_good = rbe_v22_match[good]
    rlr_good = rlr_v22_match[good]
    chi2_good = rlr_chi2_match[good]
    mix_good = rlr_mix_match[good]
    dist_good = dist_match[good]
    time_good = time_match[good]

    if(exist == 0):  #  Create the initial file

# Open the file
        
        n_id = Dataset(outfile,'w')
        
# Define the dimensions

        xdim = n_id.createDimension('xdim') # Unlimited        

# Define the output variables

        out01 = n_id.createVariable('Latitude','f4',('xdim',),zlib=True)
        out02 = n_id.createVariable('Longitude','f4',('xdim',),zlib=True)
        
        out03 = n_id.createVariable('AERONET_AOD','f4',('xdim',),zlib=True)
        out04 = n_id.createVariable('MISR_BE_AOD','f4',('xdim',),zlib=True)
        out05 = n_id.createVariable('MISR_LR_AOD','f4',('xdim',),zlib=True)
        
        out06 = n_id.createVariable('MISR_LR_CHI2','f4',('xdim',),zlib=True)
        out07 = n_id.createVariable('MISR_LR_MIX','f4',('xdim',),zlib=True)
        
        out08 = n_id.createVariable('Delta_T','f4',('xdim',),zlib=True)
        out09 = n_id.createVariable('Distance','f4',('xdim',),zlib=True)
        
# Put the data into the output

        out01[:] = lat_good 
        out02[:] = lon_good
                 
        out03[:] = aero_good
        out04[:] = rbe_good         
        out05[:] = rlr_good
        
        out06[:] = chi2_good
        out07[:] = mix_good
    
        out08[:] = time_good
        out09[:] = dist_good         
      
# Close the NetCDF file
        
        n_id.close()
            
## Append to existing file
            
    else:    #  File exists    

# Get the number of new entries

        num_good = len(lon_good)
        
# Now open the existing file and append information

        n_id = Dataset(outfile,'a')

# Choose variables
        
        var01 = n_id.variables['Latitude']
        var02 = n_id.variables['Longitude']
        
        var03 = n_id.variables['AERONET_AOD']
        var04 = n_id.variables['MISR_BE_AOD']
        var05 = n_id.variables['MISR_LR_AOD']
        
        var06 = n_id.variables['MISR_LR_CHI2']
        var07 = n_id.variables['MISR_LR_MIX']
        
        var08 = n_id.variables['Delta_T']
        var09 = n_id.variables['Distance']
            
# Get the current size of the variables            
            
        current = len(var01)
            
# Append the information

        var01[current:current+num_good] = lat_good
        var02[current:current+num_good] = lon_good
        
        var03[current:current+num_good] = aero_good
        var04[current:current+num_good] = rbe_good
        var05[current:current+num_good] = rlr_good
        
        var06[current:current+num_good] = chi2_good
        var07[current:current+num_good] = mix_good
        
        var08[current:current+num_good] = time_good
        var09[current:current+num_good] = dist_good
       
# Close the file            
        
        n_id.close()    
    
# Print the time

    all_end_time = time.time()
    print("Total elapsed time was %g seconds" % (all_end_time - all_start_time))

# Tell user completion was successful

    print("\nSuccessful Completion\n")
Example #57
class HdfFile(object):
    """Class implementing HDF file access."""
    
    GEOLOC_FIELDS = ()
    
    def __init__(self, file):
        """Constructor."""
        self.file = file
        self.hdf = None
        self.vs = None
        self.vdinfo = None
        self.sd = None
        self.savedVarsDict = None
        self.open()
        
        # Permit data files without VData's
        if self.vdinfo is not None:
            self.vdList = [i[0] for i in self.vdinfo]
            #print self.vdList
        
        # Permit data files without SDS's
        if self.sd is not None:
            self.datasetList = self.sd.datasets().keys()
            #print self.datasetList

        self.levels    = {}
        self.geoDict   = self._getGeoDict()
        self.dataDict  = self._getDataDict()
        self.close()
        
    # Always define in subclass
    def _getGeoDict(self): raise NotImplementedError("Not implemented.")
    # Always define in subclass, except for cloudsat
    def _getDataDict(self): return None

    def open(self):
        """Open for reading."""
        
        if self.hdf is None:
            self.hdf = HDF(self.file)
            self.vs = self.hdf.vstart()
            # Ignore exceptions telling us there are no VData's
            try:
                self.vdinfo = self.vs.vdatainfo()
            except HDF4Error:
                pass
            # Ignore exceptions telling us there are no SDS's
            try:
                self.sd = SD(self.file)
            except HDF4Error:
                pass
    
    def close(self):
        """Close hdf file."""
        
        if hasattr(self, 'hdf') and self.hdf is not None:
            self.vs.end()
            self.hdf.close()
            if self.sd is not None:  # the file may have no SDS's (see open())
                self.sd.end()
            self.hdf = None
            self.vs = None
            self.vdinfo = None
            self.sd = None
    
    def getGeo(self): return self.geoDict
    
    def get(self, var):
        """Return variable array dict."""
        
        self.open()
        
        #get list of vars
        if isinstance(var, types.StringTypes): var = [var]
        elif isinstance(var, (types.ListType, types.TupleType)): pass
        elif var is None:
            # If we don't have any SDS's, go with the VData's only;
            # if we don't have any VData's, go with the SDS's only
            try:
                var = list(self.vdList)  # copy so extend() does not mutate self.vdList
                try:
                    var.extend(self.datasetList)
                except AttributeError:
                    pass
            except AttributeError:
                var = self.datasetList                
        else: raise RuntimeError("Incorrect argument type for %s." % var)
        
        #create dict of (attrs, array) for each var
        a = {}
        for v in var:
            if v=='': continue # added by bytang
            #handle SD types
            ds = None
            if v in self.datasetList:
                try:
                    ds = self.sd.select(v)
                    a[v] = (ds.attributes(), N.array(ds.get()))
                    continue
                except HDF4Error, e: pass
                finally:
                    if ds is not None: ds.endaccess()
Example #58

ds = mod06.datasets()
ds_lst = ds.keys()

for name in ds_lst:
	print name



#Metadata for Cloud Optical Thickness
sds_name = "Cloud_Optical_Thickness"
sds = mod06.select(sds_name)

data = sds.attributes(full=1)
data_keys_lst = data.keys()
print "\n"
print "**************************************\n"
print "CLOUD OPTICAL THICKNESS METADATA\n"
print "======================================\n"
for key in data_keys_lst:
	print key + ":"
	print data[key]
	print "\n"
print "=======================================\n"



mod06.end()

Example #59
def load(satscene, **kwargs):
    """Read data from file and load it into *satscene*.  Load data into the
    *channels*. *Channels* is a list or a tuple containing channels we will
    load data into. If None, all channels are loaded.
    """    
    del kwargs

    conf = ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
    options = {}
    for option, value in conf.items(satscene.instrument_name+"-level3",
                                    raw = True):
        options[option] = value

    pathname = os.path.join(options["dir"], options['filename'])    
    filename = satscene.time_slot.strftime(pathname)
    
    for prodname in GEO_PHYS_PRODUCTS + FLAGS_QUALITY:
        if prodname in satscene.channels_to_load:
            
            prod_chan = ModisEosHdfLevel2(prodname)
            prod_chan.read(filename)
            prod_chan.satid = satscene.satname.capitalize()
            prod_chan.resolution = 1000.0
            prod_chan.shape = prod_chan.data.shape

            # All this for the netCDF writer:
            prod_chan.info['var_name'] = prodname
            prod_chan.info['var_data'] = prod_chan.data
            resolution_str = str(int(prod_chan.resolution))+'m'
            prod_chan.info['var_dim_names'] = ('y'+resolution_str,
                                               'x'+resolution_str)
            prod_chan.info['long_name'] = prod_chan.attr['long_name'][:-1]
            try:
                prod_chan.info['standard_name'] = prod_chan.attr['standard_name'][:-1]
            except KeyError:
                pass
            valid_min = np.min(prod_chan.data)
            valid_max = np.max(prod_chan.data)
            prod_chan.info['valid_range'] = np.array([valid_min, valid_max])
            prod_chan.info['resolution'] = prod_chan.resolution

            if prodname == 'l2_flags':
                # l2 flags definitions
                for i in range(1, 33):
                    key =  "f%02d_name"%i
                    prod_chan.info[key] = prod_chan.attr[key][:-1]

            satscene.channels.append(prod_chan)
            if prodname in CHANNELS:
                satscene[prodname].info['units'] = '%'
            else:
                satscene[prodname].info['units'] = prod_chan.attr['units'][:-1]

            LOG.info("Loading modis lvl2 product '%s' done"%prodname)

    # Check if there are any bands to load:
    channels_to_load = False
    for bandname in CHANNELS:
        if bandname in satscene.channels_to_load:
            channels_to_load = True
            break

    if channels_to_load:
        #print "FILE: ", filename
        eoshdf = SD(filename)
        # Get all the Attributes:
        # Common Attributes, Data Time,
        # Data Structure and Scene Coordinates
        # Copy the global attributes in a single call
        info = dict(eoshdf.attributes())

        dsets = eoshdf.datasets()
        selected_dsets = []

        for bandname in CHANNELS:
            if (bandname in satscene.channels_to_load and
                bandname in dsets):

                value = eoshdf.select(bandname)
                selected_dsets.append(value)
        
                # Get only the selected datasets
                attr = value.attributes()
                band = value.get()

                nodata = attr['bad_value_scaled']
                mask = np.equal(band, nodata)
                satscene[bandname] = (np.ma.masked_where(mask, band) * 
                                      attr['slope'] + attr['intercept'])

                satscene[bandname].info['units'] = '%'
                satscene[bandname].info['long_name'] = attr['long_name'][:-1]

        for dset in selected_dsets:
            dset.endaccess()  

        LOG.info("Loading modis lvl2 Remote Sensing Reflectances done")
        eoshdf.end()


    lat, lon = get_lat_lon(satscene, None)

    from pyresample import geometry
    satscene.area = geometry.SwathDefinition(lons=lon, lats=lat)

    #print "Variant: ", satscene.variant 
    satscene.variant = 'regional' # Temporary fix!

    LOG.info("Loading modis data done.")
Example #60
dl_data[(rad_sol <= -1)] = day1

day2 = rad_sol[(rad_sol < 1) & (rad_sol > -1)] 
day2 = 24.0 * np.arccos(day2)/np.pi
dl_data[(rad_sol < 1) & (rad_sol > -1)] = day2

dl_data[(rad_sol >= 1)] = 0.0  # arccos argument >= 1 gives zero hours of daylight

dl_data = dl_data.reshape(1080,2160)
print "day length complete"

####################################################################
#   Calculates NPP for each pixel by multiplying values for that   #
#   pixel from pbopt, zeu, tot_chl, par and dl together.           #
#   Produces output hdf file containing NPP data.                  #
####################################################################  

NPP_data = pbopt_data * zeu_data * chl_data * par_data * dl_data

OUTPUT1 = r"C:\Anaconda\envs\sat_data\NPP_3.hdf"  # raw string keeps the backslashes literal
NPP = SD(OUTPUT1, SDC.WRITE | SDC.CREATE)
sds = NPP.create("sds1", SDC.FLOAT64,(1080,2160))
sds.setfillvalue(0)
sds[:] = NPP_data
sds.endaccess()
NPP.end()
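
A minimal read-back check of the file written above (same path and dataset name as in the writer):

# Read back the NPP output written above to verify its contents.
from pyhdf.SD import SD, SDC

npp_file = SD(r"C:\Anaconda\envs\sat_data\NPP_3.hdf", SDC.READ)
npp = npp_file.select("sds1").get()   # (1080, 2160) float64 array
print(npp.shape)
npp_file.end()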