def convert_to_netcdf(datadir): """ Convert any files we find datadir to netCDF """ # Change directory os.chdir( datadir ) # Look for any MMOUT and NCOUT files files = glob.glob('NCOUT_DOMAIN1_[0-9][0-9][0-9]') files = files + glob.glob('MMOUTP_DOMAIN1_[0-9][0-9][0-9]') files.sort() # Loop over the files for file in files: # Skip the initial output files, we don't care about these if file == "NCOUT_DOMAIN1_000" or file == "MMOUTP_DOMAIN1_000": continue if os.path.isfile( file +".nc"): continue # Figure out how many timesteps there are. mm5 = mm5_class.mm5(file) cmd = "archiver %s 0 %s" % (file, mm5.tsteps) print "Converting %s to NetCDF %s tsteps" % (file, mm5.tsteps) si,so = os.popen4( cmd ) a = so.read() # Necessary to keep things blocking? if not os.path.isfile( file+".nc" ): print "FAIL!", file print a sys.exit() # Now we corrupt the grid, shift 30 degrees west nc = netCDF3.Dataset( file+".nc", 'a') nc.variables['coarse_cenlon'][:] = 138. nc.variables['stdlon'][:] = 138. nc.close()
def process_NCOUT(datadir): # Change directory os.chdir( datadir ) # Look for any MMOUT and NCOUT files files = glob.glob('NCOUT_DOMAIN1_[0-9][0-9][0-9][0-9]') files.sort() # Loop over the files for file in files: # Skip the initial output files, we don't care about these if file == "NCOUT_DOMAIN1_000": continue mm5 = mm5_class.mm5(file) cmd = "archiver %s 0 %s" % (file, mm5.tsteps) print "Converting %s to NetCDF %s tsteps" % (file, mm5.tsteps) si,so = os.popen4( cmd ) a = so.read() # Necessary to keep things blocking?
def extract_times(mm5file):
    """Build the time axis for an MM5 file.

    Returns a list of mx.DateTime instances, one per timestep. The first
    timestamp is sampled from the 'u' field header (this requires the
    modified mm5_class.py that exposes the header); each subsequent entry
    is one time increment later.
    """
    mm5 = mm5_class.mm5(mm5file)
    # First timestamp, e.g. "YYYY-MM-DD_HH", taken from variable 'u'
    stamp = mm5.get_field_list()['u']['time'][:13]
    start = mx.DateTime.strptime(stamp, "%Y-%m-%d_%H")
    step_seconds = mm5.timeincrement
    # The final timestep sits (tsteps - 1) increments after the first
    finish = start + mx.DateTime.RelativeDateTime(
        seconds=(step_seconds * (mm5.tsteps - 1)))
    times = []
    current = start
    while current <= finish:
        times.append(current)
        current += mx.DateTime.RelativeDateTime(seconds=step_seconds)
    del mm5
    return times
def convert_to_netcdf_step(): """ Conversion of MM5 Format files to NetCDF """ # Change directory os.chdir( DATADIR ) # Look for any MMOUTP and NCOUT files files = glob.glob("NCOUT_DOMAIN1_??") files = files + glob.glob("MMOUTP_DOMAIN1_??") files.sort() # Loop over the files for file in files: if file == "NCOUT_DOMAIN1_00" or file == "MMOUTP_DOMAIN1_00": continue # Figure out how many timesteps there are. mm5 = mm5_class.mm5(file) cmd = "archiver %s 0 %s" % (file, mm5.tsteps) print "Converting %s to NetCDF %s tsteps" % (file, mm5.tsteps) si,so = os.popen4( cmd ) a = so.read() # Necessary to keep things blocking? if not os.path.isfile( file+".nc" ): print "FAIL!", file print a sys.exit()
# NOTE(review): this fragment relies on names bound earlier in the file
# (nc2, lat, lon, xc, yc, p, v) -- presumably an open source netCDF dataset
# and output variables being populated; confirm against surrounding code.
# Copy coordinates, trimming a 15-point buffer from every grid edge.
lat[:] = nc2.variables['latitcrs'][15:-15,15:-15]
lon[:] = nc2.variables['longicrs'][15:-15,15:-15] + 360.0
# Projection x/y coordinates in meters (grid_ds is presumably km -- verify)
xc[:] = numpy.arange(15,139) * nc2.variables['grid_ds'][:] * 1000.0
yc[:] = numpy.arange(15,114) * nc2.variables['grid_ds'][:] * 1000.0
# Lambert conformal projection attributes copied from the source file
p.standard_parallel = [nc2.variables['stdlat_2'][:], nc2.variables['stdlat_1'][:]]
p.longitude_of_central_meridian = nc2.variables['coarse_cenlon'][:]
p.latitude_of_projection_origin = nc2.variables['coarse_cenlat'][:]
# Build a land-use -> parameter table from column 4 of VEGPARM.TBL
# (rows 4-30 of the file)
lookup = {}
table = open('VEGPARM.TBL', 'r').readlines()
for line in table[3:30]:
    tokens = line.split(',')
    lookup[ int(tokens[0])] = float(tokens[4])
mm5 = mm5_class.mm5('MMINPUT_DOMAIN1.196801')
data = mm5.get_field('land_use', 0)['values']
data = numpy.array(data[0])
# Second-stage lookup keyed on the float values produced by `lookup`.
# NOTE(review): float dict keys require the table values to be exactly
# 0, 0.1, 0.4, 1.0 or 2.0 -- any other value raises KeyError; confirm
# VEGPARM.TBL column 4 only contains these values.
lookup2 = {0: 0, 1: 0.1, 2: 0.4, 3: 1.0, 4: 2.0}
shp = numpy.shape(data)
sm = numpy.zeros( shp, 'f')
for i in range(shp[0]):
    for j in range(shp[1]):
        # 1.8 meter soil depth x SMC
        sm[i,j] = lookup2[ lookup[ data[i,j] ] ]
# NOTE(review): slice here is [15:-16] while lat/lon above use [15:-15];
# possibly intentional (cross vs dot grid offset) -- confirm.
v[:] = sm[15:-16,15:-16]
nc2.close()
def process_MMOUT(datadir): os.chdir(datadir) # Figure out a list of MMOUT files files = glob.glob("MMOUT_DOMAIN1_????") files.sort() # Move us to interpb for file in files: # We don't wish to convert this file, it is just along for the ride if file == "MMOUT_DOMAIN1_000": continue os.chdir("/mnt/howard/narccap/INTERPB") # Figure out time axis taxis = lib.extract_times(datadir+file) # Setup variable substitution values vars = {} vars['mm5file'] = datadir+file vars['syear'] = taxis[0].year vars['smonth'] = taxis[0].month vars['sday'] = taxis[0].day vars['shour'] = taxis[0].hour if taxis[0].day == 21: taxis[-1] = taxis[0] + mx.DateTime.RelativeDateTime(months=1,day=1) vars['eyear'] = taxis[-1].year vars['emonth'] = taxis[-1].month vars['eday'] = taxis[-1].day vars['ehour'] = taxis[-1].hour # Edit the namelist.input for interb data = open('namelist.tpl', 'r').read() out = open('namelist.input', 'w') out.write( data % vars ) out.close() # Run interb for each file print "Running INTERPB for %s [%s - %s]" % (file, taxis[0].strftime("%Y-%m-%d %H"), taxis[-1].strftime("%Y-%m-%d %H")) os.system("./interpb >& interpb.log") # Move output file to right location os.rename("MMOUTP_DOMAIN1", datadir + file.replace("UT", "UTP")) # Cleanup os.system("rm -rf FILE_*") # Gzip os.chdir( datadir ) #os.system("gzip %s" % (file,)) # Convert to NetCDF file = file.replace("UT", "UTP") mm5 = mm5_class.mm5(file) cmd = "archiver %s 0 %s" % (file, mm5.tsteps) print "Converting %s to NetCDF %s tsteps" % (file, mm5.tsteps) si,so = os.popen4( cmd ) a = so.read() # Necessary to keep things blocking? # Remove MMOUTP files if file[:6] == "MMOUTP": os.remove(file)
# Script: replace the soil-moisture fields in a CFS flux GRIB file with
# values regridded from MM5 output.
import pygrib
import Ngl
import random
import sys

# two levels, lat_98 , lon_98
grbs = pygrib.open('flx.ft06.2046010100.grb')
print grbs[6]['values']
print grbs[7]['values']
# Target grid: 1-D lat/lon axes extracted from the GRIB messages
lats, lons = grbs[6].latlons()
lats = lats[:,0]
lons = lons[0,:]
# Our final values: MM5 soil moisture on its native (curvilinear) grid
emm5 = mm5_class.mm5('MMOUT_DOMAIN1_46')
edata = numpy.ravel(emm5.get_field('soil_m_1',0)["values"])
edata4 = numpy.ravel(emm5.get_field('soil_m_4',0)["values"])
elats = numpy.ravel(emm5.get_field('latitcrs',0)["values"])
elons = numpy.ravel(emm5.get_field('longicrs',0)["values"])
# NOTE(review): tiny random jitter added to latitudes -- presumably to
# avoid duplicate input points upsetting Ngl.natgrid; confirm. This makes
# the script non-deterministic.
for i in range(len(elats)):
    elats[i] += (random.random() * 0.01)
# Natural-neighbor regrid of both soil layers onto the GRIB lat/lon grid
newdata = Ngl.natgrid(elons, elats, edata, lons, lats)
grbs[6]['values'] = newdata
newdata = Ngl.natgrid(elons, elats, edata4, lons, lats)
grbs[7]['values'] = newdata
# Write the modified messages to a new GRIB file
o = open('flx.ft06.2046010100.grb-new', 'wb')
grbs.rewind()
# NOTE(review): loop body continues past the visible portion of this file
for grb in grbs:
# Script: near-duplicate of the previous fragment (adds the Nio import) --
# replace soil-moisture fields in a CFS flux GRIB file with values
# regridded from MM5 output.
import Nio
import pygrib
import Ngl
import random
import sys

# two levels, lat_98 , lon_98
grbs = pygrib.open('flx.ft06.2046010100.grb')
print grbs[6]['values']
print grbs[7]['values']
# Target grid: 1-D lat/lon axes extracted from the GRIB messages
lats, lons = grbs[6].latlons()
lats = lats[:, 0]
lons = lons[0, :]
# Our final values: MM5 soil moisture on its native (curvilinear) grid
emm5 = mm5_class.mm5('MMOUT_DOMAIN1_46')
edata = numpy.ravel(emm5.get_field('soil_m_1', 0)["values"])
edata4 = numpy.ravel(emm5.get_field('soil_m_4', 0)["values"])
elats = numpy.ravel(emm5.get_field('latitcrs', 0)["values"])
elons = numpy.ravel(emm5.get_field('longicrs', 0)["values"])
# NOTE(review): tiny random jitter added to latitudes -- presumably to
# avoid duplicate input points upsetting Ngl.natgrid; confirm. This makes
# the script non-deterministic.
for i in range(len(elats)):
    elats[i] += (random.random() * 0.01)
# Natural-neighbor regrid of both soil layers onto the GRIB lat/lon grid
newdata = Ngl.natgrid(elons, elats, edata, lons, lats)
grbs[6]['values'] = newdata
newdata = Ngl.natgrid(elons, elats, edata4, lons, lats)
grbs[7]['values'] = newdata
# Write the modified messages to a new GRIB file
o = open('flx.ft06.2046010100.grb-new', 'wb')
grbs.rewind()
# NOTE(review): loop body continues past the visible portion of this file
for grb in grbs:
# Generate a land/sea mask for the MRED domain import netCDF3 import mm5_class import mx.DateTime mm5 = mm5_class.mm5('TERRAIN_DOMAIN1') land = mm5.get_field('landmask', 0) # 1,143,208 data = land['values'] lats = mm5.get_field('latitdot',0)['values'] lons = mm5.get_field('longidot',0)['values'] nc = netCDF3.Dataset('LANDSEA_IMM5.nc', 'w') nc.institution = "Iowa State University, Ames, IA, USA" nc.source = "MM5 (2009): atmosphere: MM5v3.6.3 non-hydrostatic, split-explicit; sea ice: Noah; land: Noah" nc.project_id = "MRED" nc.table_id = "Table 2" nc.realization = 1 nc.forcing_data = "CFS01" # Optional nc.Conventions = 'CF-1.0' nc.contact = "Daryl Herzmann, [email protected], 515-294-5978" nc.history = "%s Generated" % (mx.DateTime.now().strftime("%d %B %Y"),) nc.comment = "Runs processed on derecho@ISU, output processed on mred@ISU" nc.title = "ISU MM5 model output prepared for MRED using CFS input" nc.createDimension('y', 143) nc.createDimension('x', 208)
import mm5_class import mx.DateTime import os for i in range(1000): fp = '../testdata/MMOUT_DOMAIN1_%03i' % (i,) if not os.path.isfile( fp ): continue mm5 = mm5_class.mm5( fp ) tdict = mm5.reftime ts = mx.DateTime.DateTime(tdict['year'], tdict['month'], tdict['day'], tdict['hour']) incsec = mm5.timeincrement steps = mm5.tsteps endminute = mm5.mm5_header[('bhr', 1, 12)][1] tsend = ts + mx.DateTime.RelativeDateTime(minutes=endminute) print fp, tsend, endminute del( mm5 )