class lfmstartup():
    """Create initial HDF input files for the LFM code.

    Eventually this will include LFM and MFLFM; right now only LFM is
    supported.
    """

    def __init__(self, fileName, dims, nspecies=1):
        """Record grid metadata and build the variable name/unit tables.

        Inputs:
          fileName - name of the HDF file to create
          dims     - (NI, NJ, NK) tuple of grid size
          nspecies - number of ion species (default 1)
        """
        (self.ni, self.nj, self.nk) = dims
        self.fileName = fileName
        # Base (single-fluid) variables written to every LFM file.
        self.varNames = ['X_grid', 'Y_grid', 'Z_grid',
                         'rho_', 'vx_', 'vy_', 'vz_', 'c_',
                         'bx_', 'by_', 'bz_',
                         'bi_', 'bj_', 'bk_',
                         'ei_', 'ej_', 'ek_',
                         'ai_', 'aj_', 'ak_']
        if nspecies > 1:
            # Per-species plasma variables carry a ".<i>" suffix, e.g. "rho_.1".
            for i in range(1, nspecies + 1):
                for var in ['rho_.', 'vx_.', 'vy_.', 'vz_.', 'c_.']:
                    self.varNames.append(var + str(i))
        # Units, index-aligned with self.varNames.
        self.varUnits = ['cm', 'cm', 'cm',
                         'g/cm^3', 'cm/s', 'cm/s', 'cm/s', 'cm/s',
                         'gauss', 'gauss', 'gauss',
                         'gauss*cm^2', 'gauss*cm^2', 'gauss*cm^2',
                         'cgs*cm', 'cgs*cm', 'cgs*cm',
                         'dummy', 'dummy', 'dummy']
        if nspecies > 1:
            for i in range(1, nspecies + 1):
                for var in ['g/cm^3', 'cm/s', 'cm/s', 'cm/s', 'cm/s']:
                    self.varUnits.append(var)

    def open(self, mjd=0.0, tzero=3000.0):
        """Open the HDF file and set the global attributes.

        Inputs:
          mjd   - Modified Julian Date (default 0.0)
          tzero - solar wind initialization time (default 3000.0)
        """
        self.f = SD(self.fileName, mode=SDC.WRITE | SDC.CREATE)
        self.setGlobalAttr(mjd, tzero)
        self.initVar()
        return

    def setGlobalAttr(self, mjd, tzero):
        """Write the LFM global attributes expected by the solver."""
        self.f.attr('mjd').set(SDC.FLOAT64, mjd)
        self.f.attr('time_step').set(SDC.INT32, 0)
        self.f.attr('time_8byte').set(SDC.FLOAT64, 0.)
        self.f.attr('time').set(SDC.FLOAT32, 0.)
        self.f.attr('tilt_angle').set(SDC.FLOAT32, 0.)
        self.f.attr('tzero').set(SDC.FLOAT32, tzero)
        self.f.attr('file_contents').set(SDC.CHAR, 'a')
        self.f.attr('dipole_moment').set(SDC.CHAR, 'b')
        # Fix: original wrote the misspelled 'Python initialzer'.
        self.f.attr('written_by').set(SDC.CHAR, 'Python initializer')
        return

    def initVar(self):
        """Create every dataset in varNames and zero-fill it.

        Datasets are dimensioned (nk+1, nj+1, ni+1) as float32, with
        per-variable ni/nj/nk and units attributes.
        """
        # Renamed from `vars` to avoid shadowing the builtin.
        sds_vars = {}
        shape = (self.nk + 1, self.nj + 1, self.ni + 1)
        for varName, varUnit in zip(self.varNames, self.varUnits):
            sds_vars[varName] = self.f.create(varName, SDC.FLOAT32, shape)
            sds_vars[varName].attr('ni').set(SDC.INT32, self.ni + 1)
            sds_vars[varName].attr('nj').set(SDC.INT32, self.nj + 1)
            sds_vars[varName].attr('nk').set(SDC.INT32, self.nk + 1)
            sds_vars[varName].attr('units').set(SDC.CHAR, varUnit)
            sds_vars[varName][:] = n.zeros(shape, dtype='float32')

    def writeVar(self, varName, arr):
        """Write a 3-D array into an existing dataset of the HDF file.

        Inputs:
          varName - name of the variable to write
          arr     - 3-D array, indexed (k, j, i); cast to float32
        """
        iend = arr.shape[2]
        jend = arr.shape[1]
        kend = arr.shape[0]
        self.f.select(varName)[:kend, :jend, :iend] = arr.astype('float32')
        return

    def close(self):
        """Flush and close the HDF file."""
        self.f.end()
        return
import matplotlib.pyplot as plt
from osgeo import gdal

# Inspect a MODIS MOD14A2 (8-day fire mask) HDF granule: dump the global
# attributes, list the datasets, and open the FireMask SDS.
file_name = 'cali_2018_data/MOD14A2.A2018145.h08v04.006.2018154000419.hdf'
hdf = SD(file_name, SDC.READ)

# Overall file info (number of datasets, number of attributes).
print(hdf.info())

# Walk the global attributes by index, printing info and value for each.
# (Attributes at positions 1 and 13 hold the struct/archive metadata.)
global_attrs = hdf.attributes()
for i, attr_name in enumerate(global_attrs.keys()):
    print(i, attr_name)
    attr = hdf.attr(i)
    print(attr.info())
    print(attr.get())

# Enumerate the scientific datasets contained in the granule.
dataset_table = hdf.datasets()
for i, ds_name in enumerate(dataset_table.keys()):
    print(i, ds_name)

# Sinusoidal-projection corner coordinates of tile h08v04 (meters).
UpperLeftPointMtrs = (-11119505.196667, 5559752.598333)
LowerRightMtrs = (-10007554.677000, 4447802.078667)

# Open the fire-mask dataset and show its dimensions/type.
sds_obj = hdf.select('FireMask')
print(sds_obj.info())
# _local directories
dir_colloc = os.path.join(os.environ['WORK'], 'colloc')
dir_airs = os.path.join(os.environ['PRODUCTS'], 'sips/airs')
dir_caliop = os.path.join(os.environ['PRODUCTS'], 'sips/caliop')

# Rewrite the fname_AIRS / fname_CALIOP attributes of each collocation file
# so they point at the local copies of the AIRS and CALIOP granules.
#
# NOTE(review): `pickle` here looks like a previously-loaded dict mapping
# collocation paths to {'airs': ..., 'caliop': [...]} entries (the stdlib
# pickle module has no iteritems/items) -- confirm against the loader.
# Modernized from Python-2-only pickle.iteritems() / print statements to
# match the Python 3 style used elsewhere in this file.
for file_col, file_dict in pickle.items():
    # _fix local location
    file_col = os.path.join(dir_colloc, file_col.split('/')[-1])

    # _pull out airs and caliop files and fix them, too
    file_airs = file_dict['airs']
    file_caliop = file_dict['caliop']
    file_airs = os.path.join(dir_airs, file_airs.split('/')[-1])
    # caliop entry is a sequence; only its first element is used
    file_caliop = os.path.join(dir_caliop, file_caliop[0].split('/')[-1])

    # _write these to the file_col
    hdf = SD(file_col, SDC.WRITE)
    att = hdf.attr('fname_AIRS')
    att.set(SDC.CHAR8, file_airs)
    att = hdf.attr('fname_CALIOP')
    att.set(SDC.CHAR8, file_caliop)
    hdf.end()

    # _test output
    hdf = SD(file_col, SDC.READ)
    print(hdf.attributes())
    hdf.end()