Example #1
def finalize_step( datadir ):
    """
    This is going to be the most complicated, have to generate the necessary
    output files with everything set A-OK
    """
    os.chdir( datadir )
    # Loop over each variable
    for vname in outputvars.keys():
        meta = outputvars[ vname ]
        # Create the NetCDF File
        fname = generate_filename(vname)
        nc = netCDF3.Dataset( fname , 'w')
        nc_setup( nc , vname)
        # Now we create the variable of interest
        myvar = nc.createVariable(vname, 'f', ('time','lat','lon'))
        # Assign Units
        myvar.units = meta['units']
        # Assign Standard Name
        myvar.standard_name = meta['sname']
        # Assign Long Name
        myvar.long_name = meta["lname"]
        # Fill Value
        myvar._FillValue = 1.e20
        # Coordinate Singleton
        if meta.has_key('coord'):
            myvar.coordinates = meta['coord']
        # Cell Methods
        if meta.has_key('cellm'):
            myvar.cell_methods = meta['cellm']
        # Positive
        if meta.has_key('positive'):
            myvar.positive = meta['positive']
        # Original Name of the variable in the MM5 file
        myvar.original_name = meta['vname']

        # Lets get data already!
        files = glob.glob( datasources[ meta['source'] ])
        files.sort() # Make sure time lines up this way!
        tcounter = 0
        for file in files:
            tnc = netCDF3.Dataset( file )
            # Get the variable from the NetCDF File
            tvar = tnc.variables[ meta['vname'] ][:,:,:]
            if meta.has_key('vindex'):
                tvar = tnc.variables[ meta['vname'] ][:,meta['vindex'],:,:]
            # Figure out the time dimension length, always first
            tlen = tvar.shape[0]
            print '%s [%s,%s] %s sample: %.2f shape: %s' % (
                vname, tcounter, tcounter + tlen, file,
                tvar[0, 10, 10], tvar.shape)
            if meta.has_key('quo'):
                tvar = tvar / meta['quo']
            myvar[tcounter:(tcounter+tlen)] = tvar
            tcounter += tlen
            tnc.close()
            del(tnc)

        # Done with var
        nc.close()
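finalize_step() depends on module-level tables (outputvars, datasources) and helpers (generate_filename, nc_setup) that are not shown in this example. A minimal sketch of the shape those tables would need for the loop above to work; the key names come from the code, the values are assumptions:

# Hypothetical lookup tables assumed by finalize_step(); only the key
# names are taken from the code above, the values are illustrative.
outputvars = {
    'tas': {
        'units': 'K',                        # -> myvar.units
        'sname': 'air_temperature',          # -> myvar.standard_name
        'lname': 'Surface Air Temperature',  # -> myvar.long_name
        'vname': 't2',                       # variable name in the source files
        'source': 'MMOUT',                   # key into datasources below
        # optional keys honored above: 'coord', 'cellm', 'positive',
        # 'vindex' (level index) and 'quo' (divisor for unit conversion)
    },
}
datasources = {
    'MMOUT': 'MMOUTP_DOMAIN1_*.nc',          # glob pattern of converted files
}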
Example #2
def convert_to_netcdf(datadir):
    """
    Convert any files we find datadir to netCDF
    """
    # Change directory
    os.chdir( datadir )
    # Look for any MMOUT and NCOUT files
    files = glob.glob('NCOUT_DOMAIN1_[0-9][0-9][0-9]')
    files = files + glob.glob('MMOUTP_DOMAIN1_[0-9][0-9][0-9]')
    files.sort()
    # Loop over the files
    for file in files:
        # Skip the initial output files, we don't care about these
        if file == "NCOUT_DOMAIN1_000" or file == "MMOUTP_DOMAIN1_000":
            continue
        if os.path.isfile( file +".nc"):
            continue
        # Figure out how many timesteps there are.
        mm5 = mm5_class.mm5(file)
        cmd = "archiver %s 0 %s" % (file, mm5.tsteps)
        print "Converting %s to NetCDF %s tsteps" % (file, mm5.tsteps)
        si,so = os.popen4( cmd )
        a = so.read() # Necessary to keep things blocking?
        if not os.path.isfile( file+".nc" ):
            print "FAIL!", file
            print a
            sys.exit()
        # Now we corrupt the grid, shift 30 degrees west
        nc = netCDF3.Dataset( file+".nc", 'a')
        nc.variables['coarse_cenlon'][:] = 138.
        nc.variables['stdlon'][:] = 138.
        nc.close()
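os.popen4 only blocks here because so.read() drains the child's combined output. A sketch of the same archiver invocation via the subprocess module, which makes the blocking explicit (the command line is taken from the code above):

import subprocess

def run_archiver(mm5file, tsteps):
    """Run archiver and return its combined stdout/stderr once it finishes."""
    proc = subprocess.Popen("archiver %s 0 %s" % (mm5file, tsteps),
                            shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
    return output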
Example #3
def main():
    """
    My main method
    """
    # Command line argument number one
    vname = sys.argv[1]
    # Change directory
    os.chdir("final")
    gridx = 0
    gridy = 0
    # print out a header
    print "%-10s %-8s %-12s %14s %14s %14s" % ("RUNID", "VARIABLE", "UNITS",
                                               "MINIMUM", "AVERAGE", "MAXIMUM")
    # Look for all directories in the final/ folder
    for runid in glob.glob("*"):
        # Change to that directory
        os.chdir("/mnt/tera11/mred/postprocess/final/" + runid)
        # open the netcdf file I care about
        file = "%s_IMM5_%s3_CFS01.nc" % (vname, runid[:-1])
        nc = netCDF3.Dataset(file, 'r')
        if gridx == 0:
            gridx, gridy = find_grid(nc)

        # pull out the data
        data = nc.variables[vname][:, gridy, gridx]
        # Check if max value in the data is larger than 1
        if numpy.max(data) > 1:
            # Print out some information
            print "%s %-8s %-12s %14.6f %14.6f %14.6f" % (
                runid, vname, nc.variables[vname].units, numpy.min(data),
                numpy.average(data), numpy.max(data))
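find_grid() is not part of this example. A hypothetical stand-in that returns the grid indices closest to a target point, which is all the call above requires (the target coordinates and the 'lat'/'lon' variable names are assumptions):

import numpy

def find_grid(nc, target_lon=-90.0, target_lat=32.0):
    """Hypothetical helper: nearest (gridx, gridy) to a target lon/lat."""
    lons = nc.variables['lon'][:]
    lats = nc.variables['lat'][:]
    gridx = int(numpy.argmin(numpy.abs(lons - target_lon)))
    gridy = int(numpy.argmin(numpy.abs(lats - target_lat)))
    return gridx, gridy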
Example #4
File: master.py Project: akrherz/mred
def special_runoff():
    """
    Special function to compute total runoff, actually easy
    """
    os.chdir( DATADIR )
    fname = generate_filename("mrro")
    nc = netCDF3.Dataset( fname , 'w')
    nc_setup( nc , 'mrro')

    myvar = nc.createVariable('mrro', 'f', ('time','lat','lon'))
    myvar.units = 'kg m-2 s-1'
    myvar.standard_name = 'runoff_flux'
    myvar.coordinates = "lon lat"
    myvar.long_name = "Surface and Subsurface Runoff"
    myvar.cell_methods = "time: average (interval: 3 hours)"
    myvar._FillValue = 1.e20
    myvar.original_name = 'sfcrnoff+ugdrnoff'

    files = glob.glob( datasources[ MMOUT ])
    files.sort() # Make sure time lines up this way!
    tcounter = 0
    for file in files:
        tnc = netCDF3.Dataset( file )
        # Surface runoff
        surface = tnc.variables['sfcrnoff'][:]
        # Subsurface runoff
        subsurface = tnc.variables['ugdrnoff'][:]
        tsteps = surface.shape[0]
        for i in range(tsteps):
            if i == 0 and tcounter == 0:
                s0 = surface[i]
                ss0 = subsurface[i]
            else:
                s0 = surface[i] - sold
                ss0 = subsurface[i] - ssold
            sold = surface[i]
            ssold = subsurface[i]
            myvar[tcounter+i] = (s0 + ss0) / 10800.0
        tcounter += tsteps
        del(tnc)

    nc.close()
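The inner loop de-accumulates the runoff totals: each output step is the difference from the previous step, except the very first step of the run, and the result is divided by the 10800 s (3 hour) interval to get a flux. The same bookkeeping can be expressed with numpy.diff once the accumulated fields are concatenated in time; a sketch:

import numpy

def deaccumulate(accum, interval=10800.0):
    """Turn an accumulated (time, lat, lon) field into a mean flux per step.

    The first output step keeps the raw accumulation, matching the
    tcounter == 0 branch in special_runoff() above.
    """
    flux = numpy.empty_like(accum)
    flux[0] = accum[0]
    flux[1:] = numpy.diff(accum, axis=0)
    return flux / interval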
Example #5
def create_netcdf():
    nc = netCDF3.Dataset("data/asosgrid.nc", 'w')
    # Dimensions
    nc.createDimension('time', int((STOP - BASE).hours) )
    nc.createDimension('lat', int((NORTH-SOUTH)/DELTAY) )
    nc.createDimension('lon', int((EAST-WEST)/DELTAX)   )
    # Variables
    tm = nc.createVariable('time', 'd', ('time',))
    tm.units = 'hours since 1970-01-01'
    tm[:] = range( int((STOP - BASE).hours) )

    lat = nc.createVariable('lat', 'd', ('lat',))
    lat.units = 'degrees north'
    lat.long_name = 'Latitude'
    lat.axis = 'Y'
    lat[:] = numpy.arange(SOUTH, NORTH, DELTAY)

    lon = nc.createVariable('lon', 'd', ('lon',))
    lon.units = 'degrees east'
    lon.long_name = 'Longitude'
    lon.axis = 'X'
    lon[:] = numpy.arange(WEST, EAST, DELTAX)

    tmpk = nc.createVariable('tmpk', 'f', ('time','lat','lon'))
    tmpk.units = 'K'
    tmpk._FillValue = 1.e20
    tmpk.missing_value = 1.e20
    tmpk.long_name = 'Surface Air Temperature'

    smps = nc.createVariable('smps', 'f', ('time','lat','lon'))
    smps.units = 'm s-1'
    smps._FillValue = 1.e20
    smps.missing_value = 1.e20
    smps.long_name = '10m Wind Speed'

    skyc = nc.createVariable('skyc', 'f', ('time','lat','lon'))
    skyc.units = '%'
    skyc._FillValue = 1.e20
    skyc.missing_value = 1.e20
    skyc.long_name = 'Sky Coverage'

    p01m = nc.createVariable('p01m', 'f', ('time','lat','lon'))
    p01m.units = 'mm'
    p01m._FillValue = 1.e20
    p01m.missing_value = 1.e20
    p01m.long_name = 'Precipitation'

    relh = nc.createVariable('relh', 'f', ('time','lat','lon'))
    relh.units = '%'
    relh._FillValue = 1.e20
    relh.missing_value = 1.e20
    relh.long_name = 'Relative Humidity'

    nc.close()
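A quick sanity check after running create_netcdf() is to reopen the file read-only and confirm the shapes and attributes; a minimal sketch:

import netCDF3

nc = netCDF3.Dataset("data/asosgrid.nc", 'r')
print nc.variables['tmpk'].shape   # (time, lat, lon)
print nc.variables['tmpk'].units   # 'K'
print nc.variables['lat'][:5]      # first few latitudes
nc.close()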
Example #6
def special_precip( datadir ):
    """
    Need a special function to compute precip
    """
    os.chdir( datadir  )
    fname = generate_filename("pr")
    nc = netCDF3.Dataset( fname , 'w')
    nc_setup( nc , 'pr')

    myvar = nc.createVariable('pr', 'f', ('time','lat','lon'))
    myvar.units = 'kg m-2 s-1'
    myvar.standard_name = 'precipitation_flux'
    myvar.long_name = "Precipitation"
    myvar._FillValue = 1.e20
    myvar.coordinates = "lon lat"
    myvar.cell_methods = "time: average (interval: 3 hours)"
    myvar.original_name = 'raincon+rainnon'
    myvar.history = 'v2 code 20090806'

    files = glob.glob( datasources[ MMOUT ])
    files.sort() # Make sure time lines up this way!
    tcounter = 0
    for file in files:
        tnc = netCDF3.Dataset( file )
        # Values are in cm
        non = tnc.variables['rain_non']
        tsteps = non.shape[0]
        for i in range(tsteps):
            con = tnc.variables['rain_con'][i]
            non = tnc.variables['rain_non'][i]
            tot = non + con
            #print tcounter, max( max( tot ) )
            # Write out! Convert to kg m-2 and divide by 10800 secs
            # Input is cm need to x10 to get to mm == kg m^-2
            myvar[tcounter] = (tot * 10.0 / 10800.0).astype('f')
            tcounter += 1
        tnc.close()
        del(tnc)

    nc.close()
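The conversion factor in the write is just unit bookkeeping: the rain fields are in centimetres, so multiplying by 10 gives millimetres, and 1 mm of water over 1 m2 weighs 1 kg, so the value is already in kg m-2; dividing by the 10800 s output interval then yields the average flux. For example, 1 cm of rain in a 3 hour interval becomes 10 kg m-2 / 10800 s, or roughly 9.3e-4 kg m-2 s-1.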
Example #7
def special_spechumidity( datadir ):
    """
    Special function to compute Specific Humidity
    """
    os.chdir( datadir )
    fname = generate_filename("huss")
    nc = netCDF3.Dataset( fname , 'w')
    nc_setup( nc , 'huss')

    myvar = nc.createVariable('huss', 'f', ('time','lat','lon'))
    myvar.units = 'kg kg-1'
    myvar.standard_name = 'specific_humidity'
    myvar.long_name = "Surface Specific Humidity"
    myvar.cell_methods = "time: instantaneious"
    myvar._FillValue = 1.e20
    myvar.coordinates = "lon lat height"
    myvar.original_name = 'q2/(1+q2)'

    files = glob.glob( datasources[ MMOUT ])
    files.sort() # Make sure time lines up this way!
    tcounter = 0
    for file in files:
        tnc = netCDF3.Dataset( file )
        # 2m mixing ratio kg kg-1
        q2 = tnc.variables['q2'][:]
        tsteps = q2.shape[0]
        myvar[tcounter:(tcounter+tsteps)] = (q2 / (1.0 + q2)).astype('f')
        tcounter += tsteps
        tnc.close()
        del(tnc)

    # Compute Dew Point
    # d2 = t2[i] / (1 + 0.000425 * t2[i] * -(Numeric.log10(rh[i]/100.0)))
    # Compute Saturation vapor pressure
    # pws = Numeric.exp(77.3450 + (0.0057 * d2) - (7235 / d2)) / Numeric.power(d2, 8.2)
    # sh = 0.62198 * pws / (100000 + pws)

    nc.close()
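The q2/(1+q2) conversion follows from the definitions: the 2 m mixing ratio q2 is vapour mass per unit mass of dry air, while specific humidity is vapour mass per unit mass of moist air, so huss = q2 / (1 + q2). For a typical mixing ratio of 0.012 kg kg-1 this gives roughly 0.0119 kg kg-1.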
Example #8
File: master.py Project: akrherz/mred
def special_soilw():
    """
    Need a special function to compute soil moisture content kg m-2
    """
    os.chdir( DATADIR )
    fname = generate_filename("mrso")
    nc = netCDF3.Dataset( fname , 'w')
    nc_setup( nc , 'mrso')

    myvar = nc.createVariable('mrso', 'f', ('time','lat','lon'))
    myvar.units = 'kg m-2'
    myvar.standard_name = 'soil_moisture_content'
    myvar.long_name = "Total Soil Moisture Content"
    myvar.cell_methods = "time: instantaneious"
    myvar.coordinates = "lon lat"
    myvar._FillValue = 1.e20
    myvar.original_name = '((soil_w_1*0.1)+(soil_w_2*0.3)+(soil_w_3*0.6)+soil_w_4)/1000.0/2.0'

    files = glob.glob( datasources[ MMOUT ])
    files.sort() # Make sure time lines up this way!
    tcounter = 0
    for file in files:
        tnc = netCDF3.Dataset( file )
        # layer 1, m-3 m-3  * depth of layer [m]
        l1 = tnc.variables['soil_w_1'][:] * 0.1
        # layer 2
        l2 = tnc.variables['soil_w_2'][:] * 0.3
        # layer 3
        l3 = tnc.variables['soil_w_3'][:] * 0.6
        # layer 4
        l4 = tnc.variables['soil_w_4'][:] * 1.0
        # Total depth of water in 2m, m  1mm = 1kg
        tot = l1 + l2 + l3 + l4
        tsteps = l1.shape[0]
        myvar[tcounter:(tcounter+tsteps)] = (tot / 1000.0 / 2.0).astype('f')
        tcounter += tsteps
        del(tnc)
    nc.close()
Example #9
def main():
    runid = sys.argv[1]
    os.chdir("final/%s" % (runid,))
    files = glob.glob("*.nc")
    gridx = 0
    gridy = 0
    print "%-8s %-12s %14s %14s %14s" % ("VARIABLE", "UNITS", "MINIMUM",
         "AVERAGE", "MAXIMUM") 
    for file in files:
        nc = netCDF3.Dataset(file, 'r')
        if gridx == 0:
            gridx, gridy = find_grid( nc )

        vname = file.split("_")[0]
        data = nc.variables[vname][:,gridy,gridx]
        print "%-8s %-12s %14.6f %14.6f %14.6f" % (vname, 
             nc.variables[vname].units, numpy.min( data ),
             numpy.average( data ), numpy.max( data ) )
Example #10
File: grid_coop.py Project: akrherz/MSDOT
def create_netcdf():
    nc = netCDF3.Dataset("data/coopgrid.nc", 'w')
    # Dimensions
    nc.createDimension('time', 0)
    nc.createDimension('lat', int((NORTH - SOUTH) / DELTAY))
    nc.createDimension('lon', int((EAST - WEST) / DELTAX))
    # Variables
    tm = nc.createVariable('time', 'd', ('time', ))
    tm.units = 'days since 1970-01-01'
    tm[:] = range(int((STOP - BASE).days))

    lat = nc.createVariable('lat', 'd', ('lat', ))
    lat.units = 'degrees north'
    lat.long_name = 'Latitude'
    lat.axis = 'Y'
    lat[:] = numpy.arange(SOUTH, NORTH, DELTAY)

    lon = nc.createVariable('lon', 'd', ('lon', ))
    lon.units = 'degrees east'
    lon.long_name = 'Longitude'
    lon.axis = 'X'
    lon[:] = numpy.arange(WEST, EAST, DELTAX)

    high = nc.createVariable('high', 'f', ('time', 'lat', 'lon'))
    high.units = 'K'
    high._FillValue = 1.e20
    high.missing_value = 1.e20
    high.long_name = 'Daily High Temperature'

    low = nc.createVariable('low', 'f', ('time', 'lat', 'lon'))
    low.units = 'K'
    low._FillValue = 1.e20
    low.missing_value = 1.e20
    low.long_name = 'Daily Low Temperature'

    pday = nc.createVariable('p01d', 'f', ('time', 'lat', 'lon'))
    pday.units = 'mm'
    pday._FillValue = 1.e20
    pday.missing_value = 1.e20
    pday.long_name = 'Daily Precipitation'

    nc.close()
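Because createDimension('time', 0) makes time the unlimited dimension, the driver script (see example #22) can keep appending daily grids to this file long after it is created. A minimal sketch of writing one day into its slot; the gridded arrays and the base reference date are assumptions that mirror how tm[:] was filled above:

import netCDF3

def write_day(ncfile, ts, base, high_grid, low_grid, precip_grid):
    """Hypothetical helper: write one day's (lat, lon) grids into the file."""
    nc = netCDF3.Dataset(ncfile, 'a')
    idx = int((ts - base).days)                    # ts, base: mx.DateTime dates
    nc.variables['high'][idx, :, :] = high_grid    # Kelvin
    nc.variables['low'][idx, :, :] = low_grid      # Kelvin
    nc.variables['p01d'][idx, :, :] = precip_grid  # mm
    nc.close()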
Example #11
         WHERE station in %s and 
         valid >= '%s' and valid < '%s' GROUP by station""" % (
         ts.gmtime().year, ids, 
         ts.strftime("%Y-%m-%d %H:%M"),
     (ts + mx.DateTime.RelativeDateTime(hours=1)).strftime("%Y-%m-%d %H:%M") )
    rs = asos.query( sql ).dictresult()
    if len(rs) > 4:
        grid_tmpf(nc, ts, rs)
        grid_relh(nc, ts, rs)
        grid_wind(nc, ts, rs)
        grid_skyc(nc, ts, rs)
        grid_p01m(nc, ts, rs)
    else:
        print "%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d %H:%M"), 
            len(rs))

#
#create_netcdf()
#sys.exit()
load_stationtable()
nc = netCDF3.Dataset("data/asosgrid.nc", 'a')
now = sts
#now = mx.DateTime.DateTime(1980,1,1)
while now < ets:
  #print now
  #if now not in badtimes:
  grid_hour(nc , now)
  now += mx.DateTime.RelativeDateTime(hours=1)

nc.close()
Example #12
  y = int(( 50.0 - lat ) / 0.005 )
  return x, y
#iowa
#lat0 = 40.38
#lat1 = 43.50
#lon0 = -96.64
#lon1 = -90.14
lat0 = iemre.SOUTH
lat1 = iemre.NORTH
lon0 = iemre.WEST
lon1 = iemre.EAST
x0, y0 = lalo2pt(lat1, lon0)
#lr
x1, y1 = lalo2pt(lat0, lon1)

nc = netCDF3.Dataset('time.nc', 'w')
nc.createDimension('latitude', y1-y0)
nc.createDimension('longitude', x1-x0)

cnts = nc.createVariable('cnt', numpy.float, ('latitude','longitude'))
lats = nc.createVariable('lats', numpy.float, ('latitude'))
lons = nc.createVariable('lons', numpy.float, ('longitude'))

print x1-x0, numpy.shape( numpy.arange(lon0, lon1, 0.005))
lats[:] = numpy.arange(lat0, lat1, 0.005)
lons[:] = numpy.arange(lon0, lon1, 0.005)

data = numpy.zeros( (y1-y0,x1-x0), numpy.float )

now = sts
while (now < ets):
Example #13
# Generate .hcd file for Pavement Design Guide

#YYYYMMDDHH,Temperature (F),Wind speed (mph),% Sun shine, Precipitation, Relative humidity.

import math
import numpy
import netCDF3
import mx.DateTime, sys
from pyIEM import mesonet

anc = netCDF3.Dataset("data/asosgrid.nc", 'r')
atmpk = anc.variables['tmpk']
asmps = anc.variables['smps']
askyc = anc.variables['skyc']
arelh = anc.variables['relh']
ap01m = anc.variables['p01m']

cnc = netCDF3.Dataset("data/coopgrid.nc", 'r')
ahigh = cnc.variables['high']
alow = cnc.variables['low']
ap01d = cnc.variables['p01d']


def hourly_fitter_temp(asos, base, trange):
    """
    Use the hourly fit of asos data to do something with the COOP data
    """
    weights = (asos - numpy.min(asos)) / (numpy.max(asos) - numpy.min(asos))
    #if (base + trange) > 100:
    #   print
    #   print trange, base
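The example is cut off just after the weights are computed. Given the parameter names, one plausible continuation (an assumption, not taken from the original) is to rescale the COOP base temperature and daily range by those 0-1 weights so the hourly curve follows the ASOS shape:

import numpy

def hourly_fitter_temp_sketch(asos, base, trange):
    """Hypothetical completion of hourly_fitter_temp()."""
    weights = (asos - numpy.min(asos)) / (numpy.max(asos) - numpy.min(asos))
    return base + weights * trange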
Example #14
    def write_netcdf(self, outfile):
        """
        Output a netCDF file containing the results of the calculation
        specified by the GridCalc object.

        Each stress field encapsulated in the GridCalc object will be output
        within the netCDF file as three data fields, one for each of the stress
        tensor components L{Ttt}_NAME, L{Tpt}_NAME, L{Tpp}_NAME, where NAME is
        the name of the L{StressDef} object (e.g. L{Diurnal} or L{NSR}).

        Writing out the calculation results causes the calculation to take
        place.  No mechanism for performing the calculation and retaining it
        in memory for manipulation is currently provided.

        """

        # Create a netCDF file object to stick the calculation results in:
        nc_out = netCDF3.Dataset(outfile, 'w')

        # Set metadata fields of nc_out appropriate to the calculation at hand.

        nc_out.description = "satstress calculation on a regular grid.  All parameter units are SI (meters-kilograms-seconds)"
        nc_out.history = """Created: %s using the satstress python package: http://code.google.com/p/satstress""" % (
            time.ctime(time.time()))
        nc_out.Conventions = __NETCDF_CONVENTIONS__

        ########################################################################
        # Independent (input) parameters for the run:
        ########################################################################
        nc_out.grid_id = self.grid.grid_id
        nc_out.system_id = self.grid.satellite.system_id
        nc_out.planet_mass = self.grid.satellite.planet_mass
        nc_out.orbit_eccentricity = self.grid.satellite.orbit_eccentricity
        nc_out.orbit_semimajor_axis = self.grid.satellite.orbit_semimajor_axis

        nc_out.layer_id_0 = self.grid.satellite.layers[0].layer_id
        nc_out.density_0 = self.grid.satellite.layers[0].density
        nc_out.lame_mu_0 = self.grid.satellite.layers[0].lame_mu
        nc_out.lame_lambda_0 = self.grid.satellite.layers[0].lame_lambda
        nc_out.thickness_0 = self.grid.satellite.layers[0].thickness
        nc_out.viscosity_0 = self.grid.satellite.layers[0].viscosity
        nc_out.tensile_str_0 = self.grid.satellite.layers[0].tensile_str

        nc_out.layer_id_1 = self.grid.satellite.layers[1].layer_id
        nc_out.density_1 = self.grid.satellite.layers[1].density
        nc_out.lame_mu_1 = self.grid.satellite.layers[1].lame_mu
        nc_out.lame_lambda_1 = self.grid.satellite.layers[1].lame_lambda
        nc_out.thickness_1 = self.grid.satellite.layers[1].thickness
        nc_out.viscosity_1 = self.grid.satellite.layers[1].viscosity
        nc_out.tensile_str_1 = self.grid.satellite.layers[1].tensile_str

        nc_out.layer_id_2 = self.grid.satellite.layers[2].layer_id
        nc_out.density_2 = self.grid.satellite.layers[2].density
        nc_out.lame_mu_2 = self.grid.satellite.layers[2].lame_mu
        nc_out.lame_lambda_2 = self.grid.satellite.layers[2].lame_lambda
        nc_out.thickness_2 = self.grid.satellite.layers[2].thickness
        nc_out.viscosity_2 = self.grid.satellite.layers[2].viscosity
        nc_out.tensile_str_2 = self.grid.satellite.layers[2].tensile_str

        nc_out.layer_id_3 = self.grid.satellite.layers[3].layer_id
        nc_out.density_3 = self.grid.satellite.layers[3].density
        nc_out.lame_mu_3 = self.grid.satellite.layers[3].lame_mu
        nc_out.lame_lambda_3 = self.grid.satellite.layers[3].lame_lambda
        nc_out.thickness_3 = self.grid.satellite.layers[3].thickness
        nc_out.viscosity_3 = self.grid.satellite.layers[3].viscosity
        nc_out.tensile_str_3 = self.grid.satellite.layers[3].tensile_str

        ########################################################################
        # A selection of dependent (output) parameters for the run.
        ########################################################################
        nc_out.satellite_radius = self.grid.satellite.radius()
        nc_out.satellite_mass = self.grid.satellite.mass()
        nc_out.satellite_density = self.grid.satellite.density()
        nc_out.satellite_surface_gravity = self.grid.satellite.surface_gravity(
        )
        nc_out.satellite_orbit_period = self.grid.satellite.orbit_period()

        ########################################################################
        # What about Frequency dependent Parameters?
        ########################################################################
        # There are a variety of interesting frequency-dependent parameters
        # that we ought to really output as well (delta, and the complex Love
        # numbers, Lame parameters...) for reference, but they'll all depend on
        # exactly which slice (for instance) in NSR we're looking at, so the
        # right place to put this meta-data isn't really in the global section
        # it should be associated with the stress variables themselves.
        #
        # It seems that the right way to do this is to define several 1-d
        # variables that are not coordinate variables (that is, they don't
        # correspond to one of the dimensions, unlike latitude, or longitude)
        # e.g.:
        # nc_out.createVariable('nsr_delta_upper', 'f4', ('nsr_period',))
        # nc_out.createVariable('nsr_lame_mu_twiddle', 'f4', ('nsr_period',))

        ########################################################################
        # A few parameters pertaining to the web-interface only:
        ########################################################################
        # TODO
        nc_out.ssweb_run_id = ""
        nc_out.ssweb_username = ""
        nc_out.ssweb_ip_address = ""

        ########################################################################
        # Specify the size and shape of the output datacube.
        ########################################################################

        # LATITUDE:
        nc_out.createDimension('latitude', self.grid.lat_num)
        lats = nc_out.createVariable('latitude', 'f4', ('latitude', ))
        lats.units = "degrees_north"
        lats.long_name = "latitude"
        lats[:] = numpy.linspace(self.grid.lat_min, self.grid.lat_max,
                                 self.grid.lat_num)

        # LONGITUDE:
        nc_out.createDimension('longitude', self.grid.lon_num)
        lons = nc_out.createVariable('longitude', 'f4', ('longitude', ))
        lons.units = "degrees_east"
        lons.long_name = "longitude"
        lons[:] = numpy.linspace(self.grid.lon_min, self.grid.lon_max,
                                 self.grid.lon_num)

        # NSR_PERIOD
        nc_out.createDimension('nsr_period', self.grid.nsr_period_num)
        nsr_periods = nc_out.createVariable('nsr_period', 'f4',
                                            ('nsr_period', ))
        nsr_periods.units = "seconds"
        nsr_periods.long_name = "NSR period"
        nsr_periods[:] = numpy.logspace(numpy.log10(self.grid.nsr_period_min),
                                        numpy.log10(self.grid.nsr_period_max),
                                        self.grid.nsr_period_num)

        # TIME:
        # Check to see what kind of units we're using for time, and name the
        # variables and their units appropriately
        if self.grid.orbit_min is None:
            nc_out.createDimension('time', self.grid.time_num)
            times = nc_out.createVariable('time', 'f4', ('time', ))
            times.units = "seconds"
            times.long_name = "time after periapse"
            times[:] = numpy.linspace(self.grid.time_min, self.grid.time_max,
                                      self.grid.time_num)
        else:
            nc_out.createDimension('time', self.grid.orbit_num)
            times = nc_out.createVariable('time', 'f4', ('time', ))
            times.units = "degrees"
            times.long_name = "degrees after periapse"
            times[:] = numpy.linspace(self.grid.orbit_min, self.grid.orbit_max,
                                      self.grid.orbit_num)

        # At this point, we should have all the netCDF dimensions and their
        # corresponding coordinate variables created (latitutde, longitude,
        # time/orbit, nsr_period), but we still haven't created the data
        # variables, which will ultimately hold the results of our stress
        # calculation, and which depend on the aforedefined dimensions

        # DIURNAL:
        Ttt_Diurnal = nc_out.createVariable('Ttt_Diurnal', 'f4', (
            'time',
            'latitude',
            'longitude',
        ))
        Ttt_Diurnal.units = "Pa"
        Ttt_Diurnal.long_name = "north-south component of Diurnal eccentricity stresses"

        Tpt_Diurnal = nc_out.createVariable('Tpt_Diurnal', 'f4', (
            'time',
            'latitude',
            'longitude',
        ))
        Tpt_Diurnal.units = "Pa"
        Tpt_Diurnal.long_name = "shear component of Diurnal eccentricity stresses"

        Tpp_Diurnal = nc_out.createVariable('Tpp_Diurnal', 'f4', (
            'time',
            'latitude',
            'longitude',
        ))
        Tpp_Diurnal.units = "Pa"
        Tpp_Diurnal.long_name = "east-west component of Diurnal eccentricity stresses"

        # NSR:
        Ttt_NSR = nc_out.createVariable('Ttt_NSR', 'f4', (
            'nsr_period',
            'latitude',
            'longitude',
        ))
        Ttt_NSR.units = "Pa"
        Ttt_NSR.long_name = "north-south component of NSR stresses"

        Tpt_NSR = nc_out.createVariable('Tpt_NSR', 'f4', (
            'nsr_period',
            'latitude',
            'longitude',
        ))
        Tpt_NSR.units = "Pa"
        Tpt_NSR.long_name = "shear component of NSR stresses"

        Tpp_NSR = nc_out.createVariable('Tpp_NSR', 'f4', (
            'nsr_period',
            'latitude',
            'longitude',
        ))
        Tpp_NSR.units = "Pa"
        Tpp_NSR.long_name = "east-west component of NSR stresses"

        # Get the StressDef objects corresponding to Diurnal and NSR stresses:
        for stress in self.stresscalc.stresses:
            if stress.__name__ == 'Diurnal':
                diurnal_stress = ss.StressCalc([
                    stress,
                ])
            if stress.__name__ == 'NSR':
                nsr_stress = ss.StressCalc([
                    stress,
                ])

        # Loop over the time variable, doing diurnal calculations over an orbit
        for t in range(len(times[:])):
            # We need some kind of progress update, and we need to make sure that
            # we have a representation of the time coordinate in seconds, because
            # that's what the satstress library expects - even if we're ultimately
            # communicating time to the user in terms of "degrees after periapse"
            if self.grid.orbit_min is None:
                time_sec = times[t]
            else:
                time_sec = diurnal_stress.stresses[0].satellite.orbit_period(
                ) * (times[t] / 360.0)

            print "Calculating Diurnal stresses at", times[t], times.long_name

            for lon in range(len(lons[:])):
                for lat in range(len(lats[:])):

                    Tau_D = diurnal_stress.tensor(theta = scipy.radians(90.0-lats[lat]),\
                                                    phi = scipy.radians(lons[lon]),\
                                                      t = time_sec )

                    nc_out.variables['Ttt_Diurnal'][t, lat, lon] = Tau_D[0, 0]
                    nc_out.variables['Tpt_Diurnal'][t, lat, lon] = Tau_D[1, 0]
                    nc_out.variables['Tpp_Diurnal'][t, lat, lon] = Tau_D[1, 1]

        # Make sure everything gets written out to the file.
        nc_out.sync()

        # Change the satellite's eccentricity to zero to exclude the Diurnal
        # stresses for the purposes of calculating the NSR stresses:
        nsr_stress.stresses[0].satellite.orbit_eccentricity = 0.0

        # Loop over all the prescribed values of NSR_PERIOD, and do the NSR stress calculation
        # at each point on the surface.
        for p_nsr in range(len(nsr_periods[:])):

            # Adjust the properties of the Satellite and StressDef objects
            # for the nsr_period being considered:
            new_sat = nsr_stress.stresses[0].satellite
            new_sat.nsr_period = nsr_periods[p_nsr]
            nsr_stress = ss.StressCalc([
                ss.NSR(new_sat),
            ])

            print "Calculating NSR stresses for Pnsr = %g %s" % (
                nsr_periods[p_nsr],
                nsr_periods.units,
            )
            for lon in range(len(lons[:])):
                for lat in range(len(lats[:])):
                    Tau_N = nsr_stress.tensor(theta = scipy.radians(90-lats[lat]),\
                                                phi = scipy.radians(lons[lon]),\
                                                  t = 0 )

                    nc_out.variables['Ttt_NSR'][p_nsr, lat, lon] = Tau_N[0, 0]
                    nc_out.variables['Tpt_NSR'][p_nsr, lat, lon] = Tau_N[1, 0]
                    nc_out.variables['Tpp_NSR'][p_nsr, lat, lon] = Tau_N[1, 1]

        # Make sure everything gets written out to the file.
        nc_out.sync()
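The datacube written by write_netcdf() can be pulled back out later without redoing the calculation; a minimal read-back sketch (the file name is a placeholder):

import netCDF3

nc_in = netCDF3.Dataset("gridcalc_output.nc", 'r')   # placeholder file name
times = nc_in.variables['time'][:]
# Diurnal stress tensor components at the first output time:
ttt = nc_in.variables['Ttt_Diurnal'][0, :, :]
tpt = nc_in.variables['Tpt_Diurnal'][0, :, :]
tpp = nc_in.variables['Tpp_Diurnal'][0, :, :]
nc_in.close()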
Example #15
from matplotlib import pyplot as plt
import numpy
from scipy import stats
import netCDF3

data = []
nc = netCDF3.Dataset("SNR_28.22_89.62_water.out.mon.nc")
data.append(nc.variables['pr'][:] * 240.)
nc.close()
nc = netCDF3.Dataset("SNR_30.42_88.92_water.out.mon.nc")
data.append(nc.variables['pr'][:] * 240.)
nc.close()
nc = netCDF3.Dataset("SNR_34.38_89.54_water.out.mon.nc")
data.append(nc.variables['pr'][:] * 240.)
nc.close()

fig = plt.figure()
ax = fig.add_subplot(111)

width = 0.20
bar1 = ax.bar(numpy.arange(12), data[0], width, color='b')
bar2 = ax.bar(numpy.arange(12) + width, data[1], width, color='g')
bar3 = ax.bar(numpy.arange(12) + (width * 2.), data[2], width, color='r')

ax.set_xticks(numpy.arange(12) + width)
ax.set_xticklabels([
    "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
    "Nov", "Dec"
])
ax.set_ylabel("Monthly Precipitation [mm]")
#ax.set_ylabel("$\mathrm{Temperature}\hspace{0.6}^{\circ}\mathrm{C}$")
Example #16
import netCDF3, glob, numpy

for file in glob.glob("*.nc"):
  nc = netCDF3.Dataset(file, 'r')
  print file, numpy.sum( nc.variables['pr'][:] )
  nc.close()
Example #17
import netCDF3
from matplotlib import pyplot as plt
import numpy
from scipy import stats


def k2f(ar):
    return (ar - 273.15) * 9.0 / 5.0 + 32.0


cnc = netCDF3.Dataset("HDC_30.42_88.92_temp_humid.out.nc", 'r')
ctas = cnc.variables["tas"]

fnc = netCDF3.Dataset("SNR_30.42_88.92_temp_humid.out.nc", 'r')
ftas = fnc.variables["tas"]

fig = plt.figure()
ax = fig.add_subplot(111)
width = 0.35

n, bins, patches = ax.hist(numpy.ravel(ctas[:] - 273.15),
                           50,
                           normed=1,
                           histtype='step',
                           label='Contemporary')
n2, bins2, patches2 = ax.hist(numpy.ravel(ftas[:] - 273.15),
                              50,
                              normed=1,
                              histtype='step',
                              label='Future')
plt.legend(loc=2)
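k2f() is defined above but the visible plotting code converts to Celsius instead; to draw the same histograms in Fahrenheit the helper could be applied directly, e.g.:

n, bins, patches = ax.hist(numpy.ravel(k2f(ctas[:])), 50, normed=1,
                           histtype='step', label='Contemporary')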
Example #18
from matplotlib import pyplot as plt
import numpy
from scipy import stats
import netCDF3

data = []
nc = netCDF3.Dataset("NCP_28.22_89.62_temp_humid.out.mon.nc")
data.append(nc.variables['tas'][:])
nc.close()
nc = netCDF3.Dataset("NCP_30.42_88.92_temp_humid.out.mon.nc")
data.append(nc.variables['tas'][:])
nc.close()
nc = netCDF3.Dataset("NCP_34.38_89.54_temp_humid.out.mon.nc")
data.append(nc.variables['tas'][:])
nc.close()

fig = plt.figure()
ax = fig.add_subplot(111)

width = 0.20
bar1 = ax.bar(numpy.arange(12), data[0] - 273.15, width, color='b')
bar2 = ax.bar(numpy.arange(12) + width, data[1] - 273.15, width, color='g')
bar3 = ax.bar(numpy.arange(12) + (width * 2.),
              data[2] - 273.15,
              width,
              color='r')

ax.set_xticks(numpy.arange(12) + width)
ax.set_xticklabels([
    "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
    "Nov", "Dec"
Example #19
from matplotlib import pyplot as plt
import numpy
from scipy import stats
import netCDF3

nc = netCDF3.Dataset("SNR_30.42_88.92_water.out.mon.nc")
futured = nc.variables['pr'][:] * 240.
nc.close()

nc = netCDF3.Dataset("HDC_30.42_88.92_water.out.mon.nc")
contemp = nc.variables['pr'][:] * 240.
nc.close()

fig = plt.figure()
ax = fig.add_subplot(211)

width = 0.30
bar1 = ax.bar(numpy.arange(12) + width, futured, width, color='r')

bar2 = ax.bar(numpy.arange(12), contemp, width, color='b')

ax.set_xticks(numpy.arange(12) + width)
ax.set_xticklabels([
    "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
    "Nov", "Dec"
])
ax.set_ylabel("Precipitation [mm]")
#ax.set_ylabel("$\mathrm{Temperature}\hspace{0.6}^{\circ}\mathrm{C}$")
#ax.set_xlabel("Month")
ax.set_title("Mississippi Average Monthly Precipitation Change")
ax.grid(True)
Example #20
# Generate a land/sea mask for the MRED domain
import netCDF3
import mm5_class
import mx.DateTime

mm5 = mm5_class.mm5('TERRAIN_DOMAIN1')
land = mm5.get_field('landmask', 0)

# 1,143,208
data = land['values']

lats =  mm5.get_field('latitdot',0)['values']
lons = mm5.get_field('longidot',0)['values']

nc = netCDF3.Dataset('LANDSEA_IMM5.nc', 'w')
nc.institution   = "Iowa State University, Ames, IA, USA"
nc.source        = "MM5 (2009): atmosphere: MM5v3.6.3 non-hydrostatic, split-explicit; sea ice: Noah; land: Noah"
nc.project_id    = "MRED"
nc.table_id      = "Table 2"
nc.realization   = 1
nc.forcing_data  = "CFS01"

# Optional
nc.Conventions   = 'CF-1.0'
nc.contact       = "Daryl Herzmann, [email protected], 515-294-5978"
nc.history       = "%s Generated" % (mx.DateTime.now().strftime("%d %B %Y"),)
nc.comment       = "Runs processed on derecho@ISU, output processed on mred@ISU"
nc.title         = "ISU MM5 model output prepared for MRED using CFS input"

nc.createDimension('y', 143)
nc.createDimension('x', 208)
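The example stops right after the dimensions are created. A sketch of how the mask read from the TERRAIN file might be written onto that (y, x) grid; the variable name and attributes are assumptions:

# Hypothetical continuation: store the land/sea mask on the (y, x) grid.
import numpy

mask = nc.createVariable('landmask', 'f', ('y', 'x'))
mask.long_name = 'Land/Sea Mask'
mask[:] = numpy.reshape(data, (143, 208))   # field noted above as 1,143,208
nc.close()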
Example #21
# Need something to convert the text files from the RCM to netcdf

import netCDF3
import sys
import numpy
import mx.DateTime

fp = sys.argv[1]
t1979 = mx.DateTime.DateTime(1979, 1, 1)

nc = netCDF3.Dataset(fp + ".nc", 'w')
nc.createDimension('time', 0)

tm = nc.createVariable('time', numpy.float, ('time', ))
tm.units = 'hours since 1979-01-01 00:00:00+00'

tas = nc.createVariable('tas', numpy.float, ('time', ))
tas.units = 'K'

i = 0
for line in open(fp):
    tokens = line.split()
    if tokens[0] == "year":
        continue
    y, m, d, h = tokens[:4]
    #print y, m, d , h
    if int(m) == 2 and (int(d) == 29 or int(d) == 30 or int(d) == 31):
        continue
    ts = mx.DateTime.DateTime(1900 + int(y), int(m), int(d), int(h))
    tm[i] = (ts - t1979).hours
    tas[i] = tokens[8]
Example #22
File: grid_coop.py Project: akrherz/MSDOT
    sql = """SELECT stationid as station,
         high, low, precip * 25.4 as p01d
         from alldata_ms
         WHERE stationid in %s and 
         day = '%s'""" % (ids, ts.strftime("%Y-%m-%d"))
    rs = coop.query(sql).dictresult()
    if len(rs) > 4:
        grid_high(nc, ts, rs)
        grid_low(nc, ts, rs)
        grid_p01d(nc, ts, rs)
    else:
        print "%s has %02i entries, FAIL" % (ts.strftime("%Y-%m-%d %H:%M"),
                                             len(rs))


#
#create_netcdf()
#sys.exit()
load_stationtable()
nc = netCDF3.Dataset("data/coopgrid.nc", 'a')
now = sts
#now = mx.DateTime.DateTime(1980,1,1)
while now < ets:
    if now.day == 1 and now.month == 1:
        reset_precip(now.year)
    if now not in badtimes:
        grid_hour(nc, now)
    now += mx.DateTime.RelativeDateTime(days=1)

nc.close()
Example #23
# Fix standard_name

import netCDF3, glob

files = glob.glob("final/*/mpvhusa*")
for file in files:
    nc = netCDF3.Dataset(file, 'a')
    print nc.variables['mpvhusa'].standard_name
    nc.close()
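As written, the loop only inspects the attribute. Actually repairing it is a one-line change inside the same append-mode loop; the replacement value below is a placeholder, not taken from the original:

files = glob.glob("final/*/mpvhusa*")
for file in files:
    nc = netCDF3.Dataset(file, 'a')
    # Placeholder -- substitute whatever the correct CF standard name is.
    nc.variables['mpvhusa'].standard_name = "corrected_standard_name"
    nc.close()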
Example #24
# a bar plot with errorbars
import numpy as np
import matplotlib.pyplot as plt
import netCDF3
import sys

for i in range(6): # 6 boxes
  mred = netCDF3.Dataset("hist_box%s_pr_IMM5_1997112103_CFS01.nc" % (i,))
  m_pr = mred.variables["pr"][0,:]
  cfs = netCDF3.Dataset("hist_box%s_pr_CFS_1997112103.nc" % (i,))
  c_pr = cfs.variables["pr"][0,:]

  if i == 0:
    # Average over each bin
    N = len(cfs.dimensions['bins']) 
    m_avg = np.zeros( (6,N), 'f')
    c_avg = np.zeros( (6,N), 'f')
    bins = cfs.variables['bins'][:]
  for b in range( len(cfs.dimensions['bins']) ):
    m_avg[i,b] = np.average(m_pr[b]) 
    c_avg[i,b] = np.average(c_pr[b]) 
  mred.close()
  cfs.close()

# Average over all boxes
m_avg = np.average(m_avg,0)
c_avg = np.average(c_avg,0)
m_avg = m_avg / np.sum(m_avg) * 100.0
c_avg = c_avg / np.sum(c_avg) * 100.0

ind = np.arange(N)  # the x locations for the groups
Example #25
import netCDF3
from matplotlib import pyplot as plt
import numpy
from scipy import stats


nc = netCDF3.Dataset("data/yr_coopgrid.nc", 'r')
precip = nc.variables["p01d"]

pdata = numpy.zeros((40,), 'f')
for i in range(40):
  pdata[i] = numpy.average(precip[i,:,:]) * 365.0 / 25.4 # in/yr

fig = plt.figure()
ax = fig.add_subplot(111)
width = 0.70
bar1 = ax.bar( numpy.arange(40), pdata, width, color='b' )

ax.set_xticklabels( range(1970,2010,5) )
ax.set_ylabel("Precipitation [inch]")
ax.set_xlabel("Year")
ax.set_title("Mean Yearly Precipitation for Mississippi (1970-2009)")

slope, intercept, r_value, p_value, std_err = stats.linregress(numpy.arange(40),pdata)
ax.plot([0,40], [intercept, (40 * slope) + intercept], color='b')
print r_value, p_value

ax.legend( (bar1[0],), (r"$Precip: \frac{dP}{dyear} = %.3f , R^2 = %.2f$" % (slope, r_value ** 2),))

ax.set_ylim( 40, 90 )
Example #26
import netCDF3
import numpy

years = range(1982, 2009)
days = [
    '1111', '1112', '1113', '1114', '1115', '1121', '1122', '1123', '1124',
    '1125', '1129', '1130', '1201', '1202', '1203'
]

# Figure out our precipitation bins
# Floor: 0.25 mm/hour
# Max:   75   mm/hour  ??
# Interval: 0.25 mm/hour

bins = numpy.arange(0.25 / 3600.0, 75.0 / 3600.0, 0.25 / 3600.)

output = netCDF3.Dataset("mred_precip_histogram.nc", 'w')
output.createDimension("bins", len(bins) - 1)
output.createDimension("runid", len(years) * len(days))

data = output.createVariable("count", numpy.float32, ("runid", "bins"))
data.long_name = "Grid cell count"

ncbins = output.createVariable("bins", numpy.float32, ("bins"))
ncbins.long_name = "Precipitation Bins"
ncbins.units = "kg m-2 s-1"
ncbins[:] = bins[:-1]

cnt = 0
for year in years:
    for day in days:
        ar = numpy.zeros(None, 'f')