def write_matched_data(data):
    from netCDF4 import Dataset
    from libtools import unique, subset
    from libnva import sort_rec
    from numpy import argsort
    raise RuntimeError('USE LIVE_WRITE')
    cdf = Dataset('matched_data.nc', 'w', format='NETCDF3_CLASSIC')

    #_define dimensions
    time = unique(data.time)
    wnum = unique(data.wavenumber)
    nt = len(time)
    nw = len(wnum)
    cdf.createDimension('wavenumber', nw)
    cdf.createDimension('time', nt)

    cdf.createVariable('wavenumber', 'f8', ('wavenumber', ))
    cdf.createVariable('time', 'f8', ('time', ))
    cdf.variables['wavenumber'][:] = wnum
    cdf.variables['time'][:] = time

    cdf.createVariable('cloud', 'f4', ('time', ))
    cdf.createVariable('latitude', 'f8', ('time', ))
    cdf.createVariable('longitude', 'f8', ('time', ))

    cdf.createVariable('radiances_lbldis',
                       'f8', ('time', 'wavenumber'),
                       fill_value=-9999)
    cdf.createVariable('radiances_airs',
                       'f8', ('time', 'wavenumber'),
                       fill_value=-9999)
    cdf.createVariable('tb_lbldis',
                       'f8', ('time', 'wavenumber'),
                       fill_value=-9999)
    cdf.createVariable('tb_airs',
                       'f8', ('time', 'wavenumber'),
                       fill_value=-9999)

    for tidx in range(nt):
        t = time[tidx]

        #_pull out this time
        data_t = subset(data, time=t)

        #_pull out the per-time scalar fields
        cdf.variables['cloud'][tidx] = unique(data_t.cloud, unique=True)
        cdf.variables['latitude'][tidx] = unique(data_t.latitude, unique=True)
        cdf.variables['longitude'][tidx] = unique(data_t.longitude,
                                                  unique=True)

        #_sort these
        idx = argsort(data_t.wavenumber)

        cdf.variables['radiances_lbldis'][tidx] = data_t.lbldis[idx]
        cdf.variables['radiances_airs'][tidx] = data_t.airs[idx]

    cdf.close()
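# The function above is disabled in favor of a "live" writer, but the netCDF
# pattern it follows (Dataset, dimensions, coordinate variables, a fill-valued
# 2-D variable) is standard netCDF4 usage.  Below is a minimal, self-contained
# sketch of that same pattern; the file name and dummy values are illustrative
# only and not part of the original module.
def _write_matched_sketch(filename='matched_sketch.nc'):
    from netCDF4 import Dataset
    import numpy as np

    time = np.arange(4, dtype='f8')
    wnum = np.array([700., 900., 1100.])

    cdf = Dataset(filename, 'w', format='NETCDF3_CLASSIC')
    cdf.createDimension('time', time.size)
    cdf.createDimension('wavenumber', wnum.size)

    #_coordinate variables share their dimension names
    t_var = cdf.createVariable('time', 'f8', ('time',))
    w_var = cdf.createVariable('wavenumber', 'f8', ('wavenumber',))
    t_var[:] = time
    w_var[:] = wnum

    #_2-D field with the same fill value convention as above
    rad = cdf.createVariable('radiances_lbldis', 'f8',
                             ('time', 'wavenumber'), fill_value=-9999)
    rad[:] = np.random.rand(time.size, wnum.size)
    cdf.close()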
def obsnew_field( recs, nx=360, ny=180, **kwargs ):
	'''
	Takes recarray of obsnew data and puts it into a 2d field for plotting

	recs	: cl.sat_object

	Will crash if more than one dtg is passed
	'''
	dtg = lt.unique( recs.dtg, unique=True )

	lat = np.linspace( -89.5, 89.5, ny )
	lon = np.linspace( -179.5, 179.5, nx )
	
	#_create dummy array to hold values
	tau = np.zeros((ny,nx)) - 9999.
	
	#_convert lats and lons to indices
	i, j = lt.ll2ij( recs.lat, recs.lon, lat, lon )

	tau[j,i] = recs.tau
	idx = np.where( tau == -9999. )
	tau[idx] = 0.
##	tau = np.ma.masked_where( tau == -9999., tau )
##	tau = np.ma.MaskedArray.filled( tau, 0. )
##	tau = np.linspace(0.1,0.8,nx*ny).reshape(ny,nx)
	tau = cl.var( tau, attrv=(lat,lon) )
	return tau
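# lt.ll2ij is not included in this listing.  A possible nearest-grid-cell
# stand-in, assuming regular 1-D arrays of cell-center latitudes and longitudes
# like the ones built in obsnew_field, is sketched below (hypothetical helper,
# not the original implementation).
def _ll2ij_sketch(plat, plon, lat, lon):
    import numpy as np
    #_index of the closest grid center for every observation point
    j = np.abs(lat[:, None] - np.atleast_1d(plat)[None, :]).argmin(axis=0)
    i = np.abs(lon[:, None] - np.atleast_1d(plon)[None, :]).argmin(axis=0)
    return i, j    #_i indexes lon, j indexes lat, matching tau[j, i] above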
def plot_forker( recs, **kwargs ):
	'''	forks plots based on dtg	'''
	dtgs = lt.unique( recs.dtg )
	groups = lt.setup_groups( dtgs, **kwargs )

	#_loop over proc groups
	for group in groups:
		children = []
		for dtg in group:
			pid = os.fork()
			if pid != 0:
				children.append(pid)
			elif pid == 0:
				#_pull out day
				day = ln.subset( recs, dtg=dtg )

				#_pass day to plotter
				plot_sat( day, **kwargs )
				os._exit(0)

		for kid in children: os.waitpid(kid,0)
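# lt.setup_groups and ln.subset are project helpers not shown here.  The
# fork/waitpid batching pattern itself is self-contained; a minimal sketch with
# a generic work function (hypothetical names, POSIX only):
def _fork_batches_sketch(items, work, nproc=4):
    import os
    for n in range(0, len(items), nproc):
        children = []
        for item in items[n:n + nproc]:
            pid = os.fork()
            if pid:                 #_parent: remember the child pid
                children.append(pid)
            else:                   #_child: do the work, then exit immediately
                work(item)
                os._exit(0)

        #_wait for the whole batch before starting the next one
        for kid in children:
            os.waitpid(kid, 0)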
def plot_sat( recs, field='aod', path='.', **kwargs ):
	'''
	recs	: np.recarray

	plots all sensor data, difference plots, and whatever else Ed wanted
	'''
	import matplotlib.pyplot as plt
	import matplotlib.cm as cm
	from matplotlib.colors import LogNorm
	import libnva 	as ln
	import libcmap 	as lc
	reg_dict = lm.regions()
	plt_dict = lm.fields()


	#_pull out metadata for title
	dtg = lt.unique( recs.dtg, unique=True )
	sensor = lt.unique( recs.sensor, unique=True )
	long_name = lt.unique( recs.long_name, unique=True )
	wavelength = lt.unique( recs.wavelength, unique=True )
	level = lt.unique( recs.level, unique=True )
	title = ' '.join(( lt.human_date(dtg), sensor, long_name ))
	plt.title( title, size='small', ha='left', va='bottom',position=(0.,1.))

	#_plot specifics
	corn = reg_dict['global']['corn']
	grid = reg_dict['global']['grid']
	delt = reg_dict['global']['delta']

	#_setup colors
	icap = lc.rgbcmap( 'icap', N=256 )

	#_get specifics for field
	field = field.lower()
	levs = [0.1,0.2,0.4,0.8]
	norm = LogNorm( levs[0], levs[-1] )

	#_setup output name
	dir_out = '/'.join(( path, 'sat_01', 'global', dtg ))
	file = '_'.join(( dtg, 'total_aod', str(wavelength), sensor.lower(), 
		'l'+str(level)+'.png'))
	file = '/'.join(( dir_out, file ))
	lt.mkdir_p( dir_out )

	#_generate map to plot over
	m = ln.draw_map( grid, corn, 'cyl', delt, fill='white', **kwargs )

	#_put into 2d array
	tau2d = obsnew_field( recs, **kwargs )

	#_put data onto grid
	d, x, y = ln.map_data( tau2d, m )

	#_plot data
	CS = m.pcolormesh( x, y, d, cmap=cm.jet, norm=norm, rasterized=True )

	#_generate colorbar
	bar = plt.colorbar( CS, orientation='horizontal', shrink=0.8,
		aspect=40, pad=0.05, ticks=levs )
	bar.set_ticklabels( levs )

	dbg(file)
	plt.savefig(file)
	plt.close()
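# plot_sat depends on the project's map and colormap helpers (ln.draw_map,
# lc.rgbcmap, lm.regions), so it is not runnable on its own.  The core
# pcolormesh / LogNorm / colorbar pattern it uses is, though; a library-only
# sketch with synthetic data standing in for the satellite grid:
def _plot_aod_sketch(fname='sketch_aod.png'):
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.colors import LogNorm

    levs = [0.1, 0.2, 0.4, 0.8]
    lon = np.linspace(-179.5, 179.5, 360)
    lat = np.linspace(-89.5, 89.5, 180)
    aod = np.random.lognormal(mean=-2., size=(180, 360))

    #_log-scaled color mapping between the lowest and highest contour levels
    cs = plt.pcolormesh(lon, lat, aod, cmap='jet', shading='auto',
                        norm=LogNorm(levs[0], levs[-1]), rasterized=True)
    bar = plt.colorbar(cs, orientation='horizontal', shrink=0.8,
                       aspect=40, pad=0.05, ticks=levs)
    bar.set_ticklabels([str(l) for l in levs])
    plt.savefig(fname)
    plt.close()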
def join_icap(aod,
              fhr=120,
              fstrt=0,
              nt=None,
              finc=6,
              members=lm.current_icap(),
              **kwargs):
    '''
	Put all ICAP forecasts on a common NAAPS grid for use with ensemble
	statistics.

	require_all limits the returned values to timesteps at which every model
	that is present at all can provide data.  So if MACC is in the mix,
	no 00z forecasts are returned.
	'''
    if 'ICAP' in members: members.remove('ICAP')
    dbg(aod.size)

    #_Calculate last dtg
    species = [s.lower() for s in mod_dict['ICAP']['specs']]
    nx = mod_dict['ICAP']['nx']
    ny = mod_dict['ICAP']['ny']
    lons = np.linspace(-179.5, 179.5, nx)
    lats = np.linspace(-89.5, 89.5, ny)

    icap = cl.model_object()
    vars = lm.aod_vars()

    #_Create list of models with ANY data and icap models
    #_Loop over species, join models we have for specs
    dtg_valds = set(aod.dtg_vald)  #_list of unique dtgs
    dtg_init = lt.unique(aod.dtg_init)[0]

    #_Create array of for missing data
    ##	nan_2darray = np.empty((ny, nx))
    ##	nan_2darray[:] = NaN #_There's gotta ba shorthand for this
    nan_2darray = np.zeros((ny, nx)) - 9999.

    nens_max = len(members)
    for spec in species:
        dbg(spec, l=2)
        long_name = vars[spec]['long_name']
        for dtg_vald in dtg_valds:
            #_make recarray for one dtg, spec, but multiple models
            aod_sub = ln.subset(aod,
                                variable=spec,
                                model=members,
                                dtg_vald=dtg_vald)

            #_regrid models
            aod_rgd = np.empty((nens_max, ny, nx))
            aod_rgd[:] = -9999.
            for e in np.arange(nens_max):
                #_get model name and append it to dimension
                name = members[e]

                #_pull gridded data for specific model
                tmp = ln.subset(aod_sub, model=name)
                if tmp.size == 1:  #_Should have single rec
                    d = tmp.values[0]
                    x = d.lon
                    y = d.lat

                    #_Regrid model data to icap x/y
                    aod_rgd[e, :, :] = lt.regrid_field(x, y, d, lons,
                                                       lats).transpose()
                elif tmp.size == 0:  #_Model data missing
                    aod_rgd[e, :, :] = nan_2darray.copy()
                else:
                    print('How did this happen?')
                    return -1

            #_Get indices that are non-physical
            neg_idx = np.where(aod_rgd < -1e-5)
            aod_rgd[neg_idx] = -9999.  #_SLOW
            aod_rgd = np.ma.masked_where(aod_rgd == -9999., aod_rgd)

            #_Convert to masked array and count present models
            nens = ln.check_members(aod_rgd)
            ###			miss = ( aod_rgd[:,0,0] == -9999. ).tolist().count(True)
            ###			nens = nens_max - miss

            data = cl.var(aod_rgd, attrv=(
                members,
                lats,
                lons,
            ))
            dimsize = data.shape
            dimname = (
                'member',
                'lat',
                'lon',
            )
            vhr = lt.find_runlength(dtg_init, dtg_vald) / 3600

            icap.resize(icap.size + 1)
            icap[-1] = (data, 'ICAP', dtg_init, dtg_vald, vhr, 'global', spec,
                        nens, dimname, dimsize, '', long_name)

    #_Limit to forecasts every finc hours
    idx_fhr = np.where(icap.fhr % finc == 0)[0]
    icap = icap[idx_fhr]

    return icap
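# ln.check_members is not shown in this listing.  Given how join_icap builds
# aod_rgd, it presumably counts members whose field is not entirely missing; a
# minimal numpy sketch of that mask-and-count step (hypothetical stand-in):
def _check_members_sketch(aod_rgd):
    import numpy as np
    masked = np.ma.masked_where(aod_rgd == -9999., aod_rgd)
    #_a member is "present" if its first grid point is not masked,
    # mirroring the slice-based test used in filter() below
    mask = np.ma.getmaskarray(masked)[:, 0, 0]
    return int((~mask).sum())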
def filter(records,
           strict_icap=True,
           members=['NAAPS', 'GEOS5', 'MACC', 'MASINGAR', 'NGAC'],
           modes=False,
           **kwargs):
    '''
	Builds recarray model_object() containing only ICAP records that
		1. Contain all models in members
		2. Contain all species of that member as defined in libmeta

	records	: model_object(),	np.recarray() of aod model data
	members	: list,			list of names to require to return

	'''
    dbg(records.size)
    tmp_model = lt.unique(records.model)
    if tmp_model != ['ICAP']:
        dbg(('filter for icap only', tmp_model), l=3)
        return records
    if not strict_icap:
        dbg(('icap not set to strict, returning'), l=3)
        return records

    #_Make expected species list for each model
    specs = ln.expected_vars()

    #_Initialize return object
    out = cl.model_object()

    #_REDUCE________________________________________________________________
    #_CLEANUP_______________________________________________________________
    #_Remove records lacking any of the required members
    #_Loop over each ICAP record
    for rec in records:
        #_take slice to check for masked members
        mask = rec.values[:, 0, 0].mask
        v = rec.variable

        #_loop over each model for this record, see if variable
        # is both expected and present
        desired = []
        for model in members:
            #_make list of expected species for each model
            idx = rec.values.member.tolist().index(model)
            ###			dbg(( rec.values.member, model, idx ))
            #_make list of indices to keep
            desired.append(idx)

            #_see if model is masked, and if so, break loop
            # leaving record out
            test = mask if type(mask) == np.bool_ else mask[idx]
            if test and v in specs[model]:
                dbg((rec.dtg_vald, v, 'filtered'), l=1)
                dbg((model, 'was the cause'), l=1)
                break

        #_if it makes it past all members, add to return array
        else:
            #_Need to reduce attrv to plug var back into recarray
            atn, atv = ln.get_attr(rec)
            mem_idx = atn.index('member')
            atv[mem_idx] = atv[mem_idx][desired]

            #_Update dimsize
            vals = rec.values.copy()
            vals_out = vals[desired, :, :]
            rec.dimsize = vals_out.shape

            #_Put back into original record
            rec.values = cl.var(vals_out, attrn=atn, attrv=atv)
            out = lt.merge((out, rec))

    dbg(out.size)
    return out
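# filter() leans on Python's for/else: the else branch runs only when the
# inner member loop finishes without hitting break.  A tiny self-contained
# illustration of that control flow, with plain dicts standing in for records:
def _for_else_sketch():
    kept = []
    for rec in ({'a': 1, 'b': 2}, {'a': 1}, {'a': 3, 'b': 4}):
        for key in ('a', 'b'):
            if key not in rec:
                break               #_a required member is missing; drop it
        else:
            kept.append(rec)        #_no break: every member present, keep it
    return kept                     #_-> [{'a': 1, 'b': 2}, {'a': 3, 'b': 4}]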