def load_amsr(datfile, lonlatfile):
    """
    Name:
        load_amsr
    Purpose:
        Load AMSR sea-ice data into a succinct dictionary.
    Calling Sequence:
        amsr = load_amsr(datfile, lonlatfile)
    Input:
        datfile: path and name of the hdf data file
        lonlatfile: path and name of the hdf file holding lat and lon
    Output:
        amsr: dictionary with numpy arrays of values:
              'nfo' : gdal metadata key/value dict
              'ice' : raster band 1 of the data file
              'lat', 'lon' : coordinate arrays from the lonlat file
    Keywords:
        none
    Dependencies:
        os, gdal (osgeo), load_modis (load_hdf)
    Required files:
        dat file and lonlat file
    Raises:
        IOError when either input file does not exist
    Modification History:
        Written (v1.0): Samuel LeBlanc, 2015-05-04, NASA Ames
        Modified: replaced call to undefined error() with raise IOError;
                  dropped unused numpy import
    """
    import os
    # Validate both paths before importing the heavy gdal stack, so a bad
    # path fails fast with a clear exception (the original called an
    # undefined error() helper, which raised NameError instead).
    if not os.path.isfile(datfile):
        raise IOError('Data file not found!')
    if not os.path.isfile(lonlatfile):
        raise IOError('Lonlat file not found!')
    from load_modis import load_hdf
    from osgeo import gdal
    gdat = gdal.Open(datfile)
    dat = dict()
    dat['nfo'] = gdat.GetMetadata()                    # file-level metadata
    dat['ice'] = gdat.GetRasterBand(1).ReadAsArray()   # sea-ice raster, band 1
    # lon is SDS index 0 and lat is SDS index 1 in the companion lonlat file
    datll, dicll = load_hdf(lonlatfile, values=(('lon', 0), ('lat', 1)),
                            verbose=False)
    dat['lat'] = datll['lat']
    dat['lon'] = datll['lon']
    return dat
def load_apr(datfiles):
    """
    Name:
        load_apr
    Purpose:
        Load APR-2 zenith-beam radar reflectivity (dbz) from each hdf file
        in datfiles and concatenate the results along the time dimension.
    Calling Sequence:
        aprout = load_apr(datfiles)
    Input:
        datfiles: list of .h4 file paths to combine
    Output:
        aprout dictionary with:
            dbz:    zenith radar reflectivity
            latz:   latitude of the zenith reflectivity
            lonz:   longitude of the zenith reflectivity
            altflt: actual altitude in the atmosphere of the radar refl.
            utc:    time of measurement in utc fractional hours
    Keywords:
        none
    Dependencies:
        os, numpy, load_modis (load_hdf), datetime
    Required files:
        hdf APR-2 files from SEAC4RS
    Raises:
        ValueError when datfiles is empty
        IOError when none of the listed files could be loaded
    Modification History:
        Written (v1.0): Samuel LeBlanc, 2015-04-10, NASA Ames
        Modified: guard empty / all-skipped input (previously returned an
                  unbound name); narrowed the bare except; factored the
                  duplicated utc computation out of the two branches
    """
    # Fail fast on an empty list: the original fell through to
    # `return aprout` with aprout never created (NameError).
    if not datfiles:
        raise ValueError('datfiles is empty: no APR-2 files to load')
    import os
    import numpy as np
    import datetime
    from load_modis import load_hdf
    # SDS indices of the values of interest inside the APR-2 hdf files
    apr_value = (('lat', 16), ('lon', 17), ('alt', 15), ('time', 13),
                 ('dbz', 0), ('lat3d', 30), ('lon3d', 31), ('alt3d', 32),
                 ('lat3d_o', 24), ('lon3d_o', 25), ('alt3d_o', 26),
                 ('lat3d_s', 27), ('lon3d_s', 28), ('alt3d_s', 29))
    first = True
    for f in datfiles:
        print('Running file: %s' % f)
        if not os.path.isfile(f):
            print('Problem with file: %s' % f)
            print(' ... Skipping')
            continue
        apr, aprdicts = load_hdf(f, values=apr_value, verbose=False)
        # Transform the 3d latitudes, longitudes, and altitudes to usable
        # values by applying the stored scale (_s) and offset (_o).
        apr['latz'] = apr['lat3d'] / apr['lat3d_s'] + apr['lat3d_o']
        apr['lonz'] = apr['lon3d'] / apr['lon3d_s'] + apr['lon3d_o']
        apr['altz'] = apr['alt3d'] / apr['alt3d_s'] + apr['alt3d_o']
        apr['altflt'] = np.copy(apr['altz'])
        try:
            # Add the aircraft altitude to every beam's relative altitude.
            for z in range(apr['altz'].shape[0]):
                apr['altflt'][z, :, :] = apr['altz'][z, :, :] + apr['alt'][z, :]
        except (ValueError, IndexError):
            # Shapes of altz and alt disagree for this file; skip it
            # (narrowed from the original bare except, which hid all bugs).
            print('Problem with file: %s' % f)
            print(' ... dimensions do not agree')
            print(' ... Skipping')
            continue
        izen = apr['altz'][:, 0, 0].argmax()  # index of the zenith-pointing beam
        # Epoch seconds -> fractional hours since the start of the
        # measurement day (same expression as the original, hoisted out of
        # the first/append branches to remove duplication).
        v = datetime.datetime.utcfromtimestamp(apr['time'][izen, 0])
        day_start = (datetime.datetime(v.year, v.month, v.day, 0, 0, 0) -
                     datetime.datetime(1970, 1, 1)).total_seconds()
        utc = (apr['time'][izen, :] - day_start) / 3600.
        if first:
            aprout = dict()
            aprout['dbz'] = apr['dbz'][izen, :, :]
            aprout['altflt'] = apr['altz'][izen, :, :] + apr['alt'][izen, :]
            aprout['latz'] = apr['latz'][izen, :, :]
            aprout['lonz'] = apr['lonz'][izen, :, :]
            aprout['utc'] = utc
            first = False
        else:
            # Append along the trailing (time) axis via the transpose trick.
            aprout['dbz'] = np.concatenate(
                (aprout['dbz'].T, apr['dbz'][izen, :, :].T)).T
            aprout['altflt'] = np.concatenate(
                (aprout['altflt'].T,
                 (apr['altz'][izen, :, :] + apr['alt'][izen, :]).T)).T
            aprout['latz'] = np.concatenate(
                (aprout['latz'].T, apr['latz'][izen, :, :].T)).T
            aprout['lonz'] = np.concatenate(
                (aprout['lonz'].T, apr['lonz'][izen, :, :].T)).T
            aprout['utc'] = np.concatenate((aprout['utc'].T, utc.T)).T
    if first:
        # Every file was skipped: do not fall through to an unbound name.
        raise IOError('None of the files in datfiles could be loaded')
    print(aprout.keys())
    print('Loaded data points: %s' % (aprout['utc'].shape,))
    return aprout
def load_apr(datfiles):
    """
    Name:
        load_apr
    Purpose:
        Load APR-2 zenith-beam radar reflectivity (dbz) from each hdf file
        in datfiles and concatenate the results along the time dimension.
    Calling Sequence:
        aprout = load_apr(datfiles)
    Input:
        datfiles: list of .h4 file paths to combine
    Output:
        aprout dictionary with:
            dbz:    zenith radar reflectivity
            latz:   latitude of the zenith reflectivity
            lonz:   longitude of the zenith reflectivity
            altflt: actual altitude in the atmosphere of the radar refl.
            utc:    time of measurement in utc fractional hours
    Keywords:
        none
    Dependencies:
        os, numpy, load_modis (load_hdf), datetime
    Required files:
        hdf APR-2 files from SEAC4RS
    Raises:
        ValueError when datfiles is empty
        IOError when none of the listed files could be loaded
    Modification History:
        Written (v1.0): Samuel LeBlanc, 2015-04-10, NASA Ames
        Modified: guard empty / all-skipped input (previously returned an
                  unbound name); narrowed the bare except; factored the
                  duplicated utc computation out of the two branches
    """
    # Fail fast on an empty list: the original fell through to
    # `return aprout` with aprout never created (NameError).
    if not datfiles:
        raise ValueError('datfiles is empty: no APR-2 files to load')
    import os
    import numpy as np
    import datetime
    from load_modis import load_hdf
    # SDS indices of the values of interest inside the APR-2 hdf files
    apr_value = (('lat', 16), ('lon', 17), ('alt', 15), ('time', 13),
                 ('dbz', 0), ('lat3d', 30), ('lon3d', 31), ('alt3d', 32),
                 ('lat3d_o', 24), ('lon3d_o', 25), ('alt3d_o', 26),
                 ('lat3d_s', 27), ('lon3d_s', 28), ('alt3d_s', 29))
    first = True
    for f in datfiles:
        print('Running file: %s' % f)
        if not os.path.isfile(f):
            print('Problem with file: %s' % f)
            print(' ... Skipping')
            continue
        apr, aprdicts = load_hdf(f, values=apr_value, verbose=False)
        # Transform the 3d latitudes, longitudes, and altitudes to usable
        # values by applying the stored scale (_s) and offset (_o).
        apr['latz'] = apr['lat3d'] / apr['lat3d_s'] + apr['lat3d_o']
        apr['lonz'] = apr['lon3d'] / apr['lon3d_s'] + apr['lon3d_o']
        apr['altz'] = apr['alt3d'] / apr['alt3d_s'] + apr['alt3d_o']
        apr['altflt'] = np.copy(apr['altz'])
        try:
            # Add the aircraft altitude to every beam's relative altitude.
            for z in range(apr['altz'].shape[0]):
                apr['altflt'][z, :, :] = apr['altz'][z, :, :] + apr['alt'][z, :]
        except (ValueError, IndexError):
            # Shapes of altz and alt disagree for this file; skip it
            # (narrowed from the original bare except, which hid all bugs).
            print('Problem with file: %s' % f)
            print(' ... dimensions do not agree')
            print(' ... Skipping')
            continue
        izen = apr['altz'][:, 0, 0].argmax()  # index of the zenith-pointing beam
        # Epoch seconds -> fractional hours since the start of the
        # measurement day (same expression as the original, hoisted out of
        # the first/append branches to remove duplication).
        v = datetime.datetime.utcfromtimestamp(apr['time'][izen, 0])
        day_start = (datetime.datetime(v.year, v.month, v.day, 0, 0, 0) -
                     datetime.datetime(1970, 1, 1)).total_seconds()
        utc = (apr['time'][izen, :] - day_start) / 3600.
        if first:
            aprout = dict()
            aprout['dbz'] = apr['dbz'][izen, :, :]
            aprout['altflt'] = apr['altz'][izen, :, :] + apr['alt'][izen, :]
            aprout['latz'] = apr['latz'][izen, :, :]
            aprout['lonz'] = apr['lonz'][izen, :, :]
            aprout['utc'] = utc
            first = False
        else:
            # Append along the trailing (time) axis via the transpose trick.
            aprout['dbz'] = np.concatenate(
                (aprout['dbz'].T, apr['dbz'][izen, :, :].T)).T
            aprout['altflt'] = np.concatenate(
                (aprout['altflt'].T,
                 (apr['altz'][izen, :, :] + apr['alt'][izen, :]).T)).T
            aprout['latz'] = np.concatenate(
                (aprout['latz'].T, apr['latz'][izen, :, :].T)).T
            aprout['lonz'] = np.concatenate(
                (aprout['lonz'].T, apr['lonz'][izen, :, :].T)).T
            aprout['utc'] = np.concatenate((aprout['utc'].T, utc.T)).T
    if first:
        # Every file was skipped: do not fall through to an unbound name.
        raise IOError('None of the files in datfiles could be loaded')
    print(aprout.keys())
    print('Loaded data points: %s' % (aprout['utc'].shape,))
    return aprout
def load_amsr(datfile, lonlatfile):
    """
    Name:
        load_amsr
    Purpose:
        Load AMSR sea-ice data into a succinct dictionary.
    Calling Sequence:
        amsr = load_amsr(datfile, lonlatfile)
    Input:
        datfile: path and name of the hdf data file
        lonlatfile: path and name of the hdf file holding lat and lon
    Output:
        amsr: dictionary with numpy arrays of values:
              'nfo' : gdal metadata key/value dict
              'ice' : raster band 1 of the data file
              'lat', 'lon' : coordinate arrays from the lonlat file
    Keywords:
        none
    Dependencies:
        os, gdal (osgeo), load_modis (load_hdf)
    Required files:
        dat file and lonlat file
    Raises:
        IOError when either input file does not exist
    Modification History:
        Written (v1.0): Samuel LeBlanc, 2015-05-04, NASA Ames
        Modified: replaced call to undefined error() with raise IOError;
                  dropped unused numpy import
    """
    import os
    # Validate both paths before importing the heavy gdal stack, so a bad
    # path fails fast with a clear exception (the original called an
    # undefined error() helper, which raised NameError instead).
    if not os.path.isfile(datfile):
        raise IOError('Data file not found!')
    if not os.path.isfile(lonlatfile):
        raise IOError('Lonlat file not found!')
    from load_modis import load_hdf
    from osgeo import gdal
    gdat = gdal.Open(datfile)
    dat = dict()
    dat['nfo'] = gdat.GetMetadata()                    # file-level metadata
    dat['ice'] = gdat.GetRasterBand(1).ReadAsArray()   # sea-ice raster, band 1
    # lon is SDS index 0 and lat is SDS index 1 in the companion lonlat file
    datll, dicll = load_hdf(lonlatfile, values=(('lon', 0), ('lat', 1)),
                            verbose=False)
    dat['lat'] = datll['lat']
    dat['lon'] = datll['lon']
    return dat
# Import eMAS values # <codecell> if 'lm' in locals(): reload(lm) from load_modis import load_emas, load_hdf # <codecell> emas_file = fp+'er2/20130913/EMASL2_13965_13_20130913_1905_1918_V00.hdf' print os.path.isfile(emas_file) # <codecell> emas,emas_dicts = load_hdf(emas_file) # <codecell> emas_values = (('lat',0),('lon',1),('tau',15),('ref',23),('phase',58),('layer',59),('qa',68)) emas,emas_dicts = load_hdf(emas_file,values=emas_values) # <codecell> plt.figure() plt.plot(emas['tau']) # <markdowncell> # Now Redo the load of emas data, but with the new V01 files, which includes the newest calibration as of 20150122, which is considered final for SEAC4RS. thermal band revisions (3.7 um and higher) may still occur. #