Example 1
def get_colloc_crap(i, file_col):
    from pyhdf.SD import SD, SDC
    from libtools import dbg
    dbg(file_col)
    hdf = SD(file_col, SDC.READ)
    x = hdf.select('Master_Index_1')[i]
    a = hdf.select('Master_Index_2')[i]
    return x, a, hdf.attributes()['fname_AIRS']  #_WRS
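A minimal usage sketch (the collocation path below is hypothetical; any HDF4 file carrying the Master_Index_1/Master_Index_2 datasets and an fname_AIRS attribute, as read above, would do):

file_col = '/data/colloc/colloc.airs_caliop.example.hdf'  #_hypothetical path
idx_x, idx_a, fname_airs = get_colloc_crap(0, file_col)
print('fov 0 -> cross %s, along %s, airs %s' % (idx_x, idx_a, fname_airs))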
Example 2
def check_ctp(fov, wave_obs, apriori=[], dynamic_CTP=False, **kwargs):
    '''
	setting dynamic_CTP to True replaces the
	apriori cloud top pressure with one from a separate
	source.  Currently, only supporting dual regression
	heights from Scanning-HIS
	'''
    from libgeo import p2z, planck_inv
    from lblrtm_utils import microwindow_average
    from libtools import dbg
    if not dynamic_CTP:
        return

    #_loop over each layer
    for layer in apriori:

        #_skip aerosol layers
        if layer['type'] != 'cloud':
            continue

        #_giving source priority by list order
        for src in dynamic_CTP:

            if src == 'SHIS':

                #_calc cloud top height
                CTP = getattr(fov, '{0}_CTP'.format(src))
                CTZ = p2z(CTP) / 1e3

                #_get brightness temperatures for a few windows
                obs_rads = getattr(fov, '{0}_radiances'.format(src))
                r, w = microwindow_average(obs_rads, wave_obs, -26)
                bt = planck_inv(r, w * 100, domain='wavenumber')[-3:].mean()

                #_if appropriate layer and there is a value, use SHIS-DR
                if CTZ > 0:
                    layer['z_top'] = CTZ
                    layer['z'] = CTZ - 0.1

                    dbg(('WRS0', CTZ))
                #_check if 'ice' cloud
                if bt < 273.15:  #_maybe lower

                    #_add ice ssp to list and change cloud database to it
                    ice = '/data/wsessions/lbldis_inputs/' \
                     + 'ssp_db.mie_ice.gamma_sigma_0p100'
                    kwargs['ssp_db_files'].append(ice)
                    layer['dbnum'] = len(kwargs['ssp_db_files']) - 1  #_index of ssp just appended

            #_other source options here elif 'cloudsat' or whatever
            else:
                continue

            #_once complete for one source, skip rest
            break
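A sketch of how check_ctp might be driven; the layer dictionary and keyword shapes are assumptions inferred from the key names used above, and fov / wave_obs are assumed to come from a Flight_segment field of view as in the later examples:

#_fov is assumed to carry SHIS_CTP and SHIS_radiances, as accessed above
apriori = [{'type': 'cloud', 'z': 2.9, 'z_top': 3.0}]
kwargs = {'ssp_db_files': ['/data/wsessions/lbldis_inputs/ssp_db.mie_wat.gamma_sigma_0p100']}
check_ctp(fov, wave_obs, apriori=apriori, dynamic_CTP=['SHIS'], **kwargs)
#_on return the cloud layer's z/z_top hold the SHIS-DR height (km) and,
# for cold scenes, ssp_db_files gains an ice database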
Example 3
def run_main(**kwargs):

	from libtools import dbg

	#_read fields we want to plot into dictionary
	data = read_period(**kwargs)
	dbg('DONE READING')

	#_plot time frame
	#_loop over each thing that is more than ~an hour apart
	windows = get_windows(data)
	for e0, e1 in windows:
		kwargs.update({'e0' : e0, 'e1' : e1})
		plot_period(data, **kwargs)

		#_remove what we just used up
		idx = (data.epoch >= e0) * (data.epoch <= e1)
		data = data[idx == False]
		dbg('done for one')
Example 4
def get_windows(d):
	from numpy import diff, arange
	from libtools import dbg
	from libtools import epoch2iso as e2i

	#_pull out just optimal estimation times and put them in order
	oe = d[d.name == 'oe']
	oe = oe[oe.epoch.argsort()]
	e = oe.epoch

	#_make a list of where more than an hour passes between retrievals
	idx = arange(e.size - 1)[diff(e) > 3600.]
	es = []
	time_str = e[0]
	for x, y in zip(e[idx], e[idx + 1]):
		time_end = y + 1
		es.append([time_str, time_end])
		time_str = y + 2

	es.append([time_str, e[-1]])

	for x, y in es:
		dbg(e2i((x,y)))

	return es 
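The window splitting above hinges on one indexing trick: positions where consecutive retrievals are more than an hour apart. A standalone sketch with toy epochs:

from numpy import array, diff, arange

e = array([0., 600., 1200., 9000., 9600.])   #_one gap larger than 3600 s
idx = arange(e.size - 1)[diff(e) > 3600.]    #_-> [2], index before the gap
print(list(zip(e[idx], e[idx + 1])))         #_-> [(1200.0, 9000.0)]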
Example 5
def check_atp(fov, apriori=[], dynamic_ATP=False, **kwargs):
    '''
	setting dynamic_ATP replaces the apriori aerosol
	layer heights with ones from a separate source,
	in list priority order.  Currently, only supporting NAAPS
	'''
    if not dynamic_ATP:
        return

    from hs3_utils import get_naaps_ctp
    from libgeo import p2z
    from libtools import dbg
    from numpy import ndarray

    #_loop over each aerosol layer
    for layer in apriori:
        dbg('LAYER0')
        if layer['type'] != 'aerosol':
            continue

        #_loop over potential sources by priority
        for src in dynamic_ATP:
            dbg(('LAYER1', src))

            if src == 'NAAPS':

                arg = (fov.SHIS_epoch, fov.SHIS_latitude, fov.SHIS_longitude)
                CTP, CBP = get_naaps_ctp(
                    *arg, **kwargs)  #_layer top and bot pressures
                CTZ = p2z(CTP) / 1e3
                CBZ = p2z(CBP) / 1e3

                #_if appropriate layer and NAAPS has a value, use it
                #_normalize scalars so max() below works on both branches
                if isinstance(CTZ, float):
                    CTZ, CBZ = [CTZ], [CBZ]
                if isinstance(CTZ, (list, ndarray)) and max(CTZ) > 0:
                    dbg(('WRS0', CTZ))

                    #_update layer
                    layer['z_top'] = max(CTZ)
                    layer['z'] = max(CBZ)
                    dbg(('LAYER3', CTZ))

            ##	elif src == 'SHIS':

            break
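A usage sketch mirroring check_ctp; the fov attributes (SHIS_epoch, SHIS_latitude, SHIS_longitude) and whatever keywords get_naaps_ctp needs are assumptions taken from the code above:

apriori = [{'type': 'aerosol', 'z': 0.5, 'z_top': 1.5}]
check_atp(fov, apriori=apriori, dynamic_ATP=['NAAPS'], **kwargs)
#_on return the aerosol layer's z/z_top hold the NAAPS bottom/top heights (km)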
Example 6
if __name__ == '__main__':
    from hs3_utils import Flight_segment
    from qsubmissions import lblrtm_hs3_lim, lbldis_hs3_lim
    from hs3_utils import find_shiscplgdas
    import re
    from libtools import dbg

    ############################################################################
    #_PLOTTING ONLY
    if 0:  #_WITH RETRIEVALS
        ppath = namelist.get('dir_plot')
        namelist.update(
            {'dir_plot': os.path.join(ppath, out_label, 'DASHBOARD')})
        for dtg, values in segments.iteritems():
            #_plot comparisons between OD from SHIS and CPL
            # SEPARATE! CANNOT RUN WITH real_case_qsub()
            dbg(dtg)
            final = 'lbldis_output.{0}.final.cdf'.format(out_label)
            clear = 'lbldis_output.{0}.clear.cdf'.format(out_label)
            namelist.update({
                'lbldis_output': final,
                'lbldis_clear': clear,
            })
            namelist.update(values)
            flight = Flight_segment(dtg)
            flight.plot(**namelist)  #_replot everything

    if 0:  #_GENERATE SUMMARY
        for out_label in out_labels:
            for dtg, values in segments.iteritems():
                namelist['dtg'] = dtg
                final = 'lbldis_output.{0}.final.cdf'.format(out_label)
Example 7
def run(
        idx_cross,
        idx_along,
        experiment,
        rerun=False,
        pk_file='/home/wsessions/data/updated_airscal.pk',  #_WRS
        dir_airs=os.path.join(os.environ['PRODUCTS'], 'sips/airs'),  #_WRS
        dir_lbl=os.path.join(DIR_PROD, 'LBL-RTM'),
        file_col=None,
        **kwargs):
    ''' idx defines which profile index to run simulation on '''
    from lblrtm_utils import run_lblrtm_profile
    import re
    from pickle import load
    from oe_utils import read_profile_gdas, read_airs
    from libtools import epoch2dtg, remote_copy, dbg

    #_read in run options
    kw = {
        'iatmm': 1,
        'v1': 600.0,
        'v2': 2500.0,
        'ioddd': 0,
        'dummy_levs': 0.5,
        'dir_lblrtm_fmt': '/data/wsessions/LBL-RTM/{0}.{1}',  #_experiment, dtg
        'cntnm': 5,
        'merge': 1,
        'ipunch': 1,
        'iemit': 0,
        'nproc': 1,
        'dv': 0.5
    }
    #'clddef': [{'z': 2.0, 'z_top': 4.0}, {'z': 3.0, 'z_top': 5.0}, {'z': 4.0, 'z_top': 6.0}]}

    #_get press/prof data
    file_key = load(open(pk_file,
                         'rb'))[file_col.split('/')[-1]]['airs']  #_WRS
    file_key = os.path.join(dir_airs, file_key.split('/')[-1])  #_WRS

    #_copy over airs/gdas files

    gdas = read_profile_gdas(file_key, idx_cross, idx_along)
    airs, wave = read_airs(file_key, idx_cross, idx_along)  #_WRS
    profile = gdas[0]
    radfile = airs[0]  #_WRS

    #_open pickle, get out collocation files, use file_col to find associated
    # airs file, which will dictate the GDAS files yes this is dumb.
    pressure = profile.GDAS_pressure
    latitude = radfile.AIRS_latitude  #_WRS
    longitude = radfile.AIRS_longitude  #_WRS
    epoch = radfile.AIRS_epoch  #_WRS
    ##	latitude	= profile.GDAS_latitude
    ##	longitude	= profile.GDAS_longitude
    sensor_altitude = 7077.75  #e3	#_should this be in km?

    #_output and run directory
    dtg = epoch2dtg(epoch, full=True)
    label = '{0}_{1}'.format(experiment, dtg)
    path_out = os.path.join(dir_lbl, label)
    src = kwargs.get('profile_source', 'GDAS')
    kwargs.update({
        'label': label,
        'dir_lblrtm_out': path_out,
        'pressure': pressure,
        'h1': sensor_altitude,
        'latitude': latitude,
        'longitude': longitude,
        'epoch': epoch
    })
    kwargs.update(kw)

    #_do not rerun if unnecessary
    if not rerun and os.path.exists(path_out):
        dbg('LBL-RTM already processed for {0}'.format(path_out))
        return 0

    #_run lblrtm from profile
    run_lblrtm_profile(profile, **kwargs)
    return 0
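A hypothetical invocation (the indices, experiment name, and collocation path are illustrative only; pk_file must map the collocation basename to an AIRS granule as above):

run(15, 100, 'AIRSCAL',
    file_col='/data/colloc/colloc.airs_caliop.example.hdf',  #_hypothetical
    profile_source='GDAS')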
Example 8
def main(fidx,
         dir_lblrtm_fmt='',
         dir_lblrtm=None,
         dir_plot='.',
         persistence=False,
         hostname=False,
         file_col=None,
         experiment='HS3',
         surf_temp='GDAS',
         rerun=False,
         **kwargs):
    '''
	completely relies upon the kwargs pickle 
	written by optimal_estimation.real_case()

	fidx	int,	Field of view number associated with
					flight segment file in $PRODUCTS/hs3
	'''

    import matplotlib.pyplot as plt
    from optimal_estimation import optimal_estimation as oe
    from numpy import diag, matrix
    from lblrtm_utils import microwindow_average, microwindow_stdev
    from lblrtm_utils import get_surf_temp
    from libtools import mkdir_p, epoch2dtg, dbg, remote_copy
    from shutil import copytree, rmtree, copy2
    from glob import glob
    from oe_utils import read_airs
    from time import sleep
    from hs3_utils import Flight_segment as F
    from os import unlink

    try:
        file_col, hostname = remote_copy(file_col)
    except:
        pass

    #_read in radiance data for profile
    flight = F(**kwargs)
    fov = flight[fidx]
    wave = flight.SHIS_wavenumber
    rads = fov.SHIS_radiances
    errs = flight.SHIS_HBB_NESR

    #_initialize plot
    #	fig, ax = plt.subplots(1, **{'figsize' : (4,1)})
    fig, ax = plt.subplots(1, figsize=(4, 1))
    fig.suptitle(kwargs.get('desc'), size='xx-small')
    out_label = kwargs.get('out_label')
    pname = 'oe_{0}_{1}_f{2:04d}.png'.format(out_label, flight.dtg0, fidx)
    pname = os.path.join(dir_plot, 'OE', pname)
    mkdir_p(os.path.join(dir_plot, 'OE'))
    if os.path.exists(pname):
        os.unlink(pname)

    #_build covariance matrix
    #_when using microwindows, average observation data
    dv = kwargs.get('dv')
    if dv < 0:
        std, d = microwindow_stdev(rads, wave, dv)
        nesr, d = microwindow_average(errs, wave, dv, error=True)
        y, wave_obs = microwindow_average(rads, wave, dv)

        #_std deviation within microwindows applied to ref
        # blackbody
        cov = matrix(diag(nesr) + diag(std))

    else:
        cov = matrix(diag(errs))

    #_allow for arbitrary lblrtm directory, get location of LBL-RTM output
    dtgfov = (flight.dtg0, flight.dtg1, fidx, experiment, flight.dtg0[2:])
    dir_lblrtm_arc = dir_lblrtm_fmt.format(*dtgfov) if dir_lblrtm is None \
     else dir_lblrtm

    #_update kwargs based upon dynamic ATP/CTP
    surf_temp = get_surf_temp(fov,
                              surf_temp_src=surf_temp,
                              dir_lblrtm=dir_lblrtm_arc,
                              **kwargs)
    check_ctp(fov, wave, **kwargs)
    check_atp(fov, **kwargs)

    #_if already run and not desired to redo, kill
    lbldis_output = kwargs.get('lbldis_output')
    file_out = os.path.join(dir_lblrtm_arc, lbldis_output)
    if not rerun and os.path.exists(file_out):
        dbg(('Scene already processed', file_out))
        return

    #_check last field of view for persistence
    if type(persistence) == dict and fidx != 0:
        print 'DONT USE THIS IS PROBABLY WRONG DIRECTORY'
        dtgfov_tmp = (experiment, flight.dtg0)  #_dtg was undefined here; use segment start
        dir_lblrtm_arc_tmp = dir_lblrtm_fmt.format(*dtgfov_tmp)
        print 'CHECKING PERSIST', kwargs['apriori']

        #_put this in a try clause to allow for failure
        try:
            check_persistence(dir_lblrtm_arc_tmp, persistence, **kwargs)
        except IOError:
            pass
        print 'CHECKING OUTSIST', kwargs['apriori']

    #_copy the archive LBLRTM directory locally and work from there
    dir_lblrtm = os.path.join(DIR_WORK,
                              '/'.join(dir_lblrtm_arc.split('/')[-2:]))

    if not os.path.exists(dir_lblrtm + '/TAPE5'):
        killcount = 0
        while killcount < 10:
            try:
                mkdir_p(dir_lblrtm)
                [copy2(cp, dir_lblrtm) for cp in glob(dir_lblrtm_arc + '/*')]
                break
            except:
                #_if fails, try ten times
                sleep(10)
                killcount += 1

            if killcount >= 10:
                exit()

    #_update keyword arguments for this fov
    args = (rads, wave, cov)
    kwargs.update({
        'fig': fig,
        'ax': ax,
        'pname': pname,
        'nproc': 1,  #_just in case I forgot to tone it down
        'dir_lblrtm_arc': dir_lblrtm_arc,
        'dir_lblrtm': dir_lblrtm,
        'surf_temp': surf_temp,
    })

    #_pull_the_trigger_#
    #__________________#
    dbg('LAUNCHING...')
    oe(*args, **kwargs)

    #_archive crap
    for out_file in glob(dir_lblrtm + '/*' + out_label + '*'):

        out = os.path.join(dir_lblrtm_arc, out_file.split('/')[-1])

        #_remove old output if rerunning
        if rerun and os.path.exists(out):
            unlink(out)

        #_retry a bounded number of times instead of spinning forever
        killcount = 0
        while not os.path.exists(out) and killcount < 10:
            try:
                dbg(('archiving', out_file, '->', out))
                copy2(out_file, dir_lblrtm_arc)
            except:
                dbg(('failed to copy', out_file, '->', out))
                sleep(10)
                killcount += 1

    #_plot up results with flight data
    kwargs.update({'dir_plot': os.path.join(dir_plot, 'DASHBOARD')})
    flight.plot(fidx=fidx, rerun=rerun, **kwargs)

    #_remove temporary directory
    rmtree(dir_lblrtm)
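The covariance build above is purely diagonal: microwindow noise (NESR) plus the in-window standard deviation, with no inter-channel correlation. A toy sketch with made-up numbers:

from numpy import array, diag, matrix

nesr = array([0.12, 0.10, 0.15])  #_noise-equivalent radiance per microwindow
std = array([0.05, 0.02, 0.04])   #_std deviation within each microwindow
cov = matrix(diag(nesr) + diag(std))  #_3x3, zeros off the diagonal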
Example 9
def read_period(plot_oe=False, plot_caliop=False, **kwargs):
	'''
	plot CALIOP and retrieval for time frame
	return record array with
	dtype = data, epoch, lat, lon, source
	'''
	from glob import glob
	from oe_utils import read_airs, read_caliop, get_airs_times, read_oe
	from oe_utils import get_collocation_indices, precheck
	from pyhdf.SD import SD, SDC
	from libtools import epoch2dtg as e2d
	from libtools import dbg
	from numpy import array, append, vstack, recarray, ndarray

	#_pull out some vars
	e0 = kwargs.get('e0', 0)
	e1 = kwargs.get('e1', 9e9)
	latlon = [-90,90,-180,180]

	#_read in previously generated file list
	files_colloc, pre_create = precheck(**kwargs)

	#_create recarray for these things.
	dtype = [('values', ndarray), ('lat','f4'), ('lon','f4'), ('epoch', 'f8'),
			('name','a20')]
	data = recarray((0,), dtype)

	#_loop over collocation times and read in crap
	all_oe = []
	for ii, file_colloc in enumerate(files_colloc):
		#_kklluuddggee
		file_colloc = file_colloc.split('/')[-1]
		file_colloc = os.path.join(os.environ['PRODUCTS'], 'colloc', file_colloc)
		dbg(file_colloc)

		#_get name of AIRS file
		hdf = SD(file_colloc, SDC.READ)
		file_airs = hdf.attributes()['fname_AIRS'] 
		file_caliop	= hdf.attributes()['fname_CALIOP']	

		#_check if ANYTHING is within the period
		e0_airs, e1_airs = get_airs_times(file_airs)
		
		#_if completely outside window, skip
		if (e1_airs < e0) or (e0_airs > e1):
			continue

		#_get indices
		idx_x, idx_l, idx_c = get_collocation_indices(file_colloc)
	
		#_read in airs and caliop fields
		#_there's no time data in colloc file, so use AIRS to filter
		airs, wave = read_airs(file_airs, idx_x, idx_l, **kwargs)

		#_check airs times
		idx = (airs.AIRS_epoch >= e0) * (airs.AIRS_epoch < e1)
		lat0, lat1, lon0, lon1 = latlon
		lidx = (airs.AIRS_latitude >= lat0) * (airs.AIRS_latitude < lat1)
		nidx = (airs.AIRS_longitude >= lon0) * (airs.AIRS_longitude < lon1)
		idx = idx * lidx * nidx	

		#_skip if none match
		if idx.sum() == 0:
			continue

		#_keep track of which files for future runs
		if pre_create:
			pre_create.write(file_colloc + '\n')	

		#_limit to 4d box
		airs = airs[idx]
		idx_c = idx_c[idx]
		idx_x = idx_x[idx]
		idx_l = idx_l[idx]

		#_read oe output
		if plot_oe:
			dbg('READING AIRS')
			oe = read_oe(airs.AIRS_epoch, **kwargs)
			new = len(oe)

			#_add to recarray
			data.resize(data.size + new)
			data.epoch[-new:] = airs.AIRS_epoch[:]
			data.lat[-new:] = airs.AIRS_latitude[:]
			data.lon[-new:] = airs.AIRS_longitude[:]
			data.name[-new:] = 'oe'
			for nnn, v in enumerate(oe):
				data.values[-new+nnn] = v

		#_do caliop
		if plot_caliop:
			idx_c = idx_c[:,0][:,None]
			caliop = read_caliop(file_caliop, idx_c, **kwargs)

			#_flatten caliop and drop missing fields
			mask = idx_c.flatten() != -1
			backscatter = vstack(caliop.CALIOP_total_attenuated_backscatter_532)[mask]
			longitude = vstack(caliop.CALIOP_longitude).flatten()[mask]
			epoch = vstack(caliop.CALIOP_epoch).flatten()[mask]
			latitude = vstack(caliop.CALIOP_latitude).flatten()[mask]
			
			new = mask.sum() #caliop.size
			data.resize(data.size + new)
			data.epoch[-new:] = epoch[:] 
			data.lat[-new:] = latitude[:] 
			data.lon[-new:] = longitude[:] 
			data.name[-new:] = 'caliop'	
			for nnn in range(new):
				data.values[-new+nnn] = backscatter[nnn]


	#_close precreated file
	if pre_create:
		pre_create.close()

	#_send recarray back to sender
	return data
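read_period grows its recarray in place for every matching file: resize, then back-fill the last `new` records. The pattern in isolation (field names match the dtype above; the values are placeholders):

from numpy import recarray, ndarray, arange

dtype = [('values', ndarray), ('lat', 'f4'), ('lon', 'f4'),
         ('epoch', 'f8'), ('name', 'a20')]
data = recarray((0,), dtype)

new = 3
data.resize(data.size + new)        #_grow in place
data.epoch[-new:] = [0., 1., 2.]
data.name[-new:] = 'caliop'
for nnn in range(new):
    data.values[-new + nnn] = arange(5)  #_one profile per record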
Example 10
def plot_period(data, e0, e1, plot_oe=True, plot_caliop=False,
	cmfile='/home/wsessions/lib/cmaps/calipso-backscatter.cmap', **kwargs):
	''' plot all the crap in data '''
	#_read in retrieval data for period
	##	obs = read_oe(airs.AIRS_epoch, **kwargs)
	import matplotlib.pyplot as plt
	from libcmap import rgbcmap
	from libtools import epoch2iso as e2i
	from numpy import array, vstack, arange, recarray, append
	from libtools import shrink_ticks, dbg
	from numpy.ma import masked_where as mw
	import ccplot_cmap as ccc

	nplot = sum([plot_oe, plot_caliop])
	fig, ax = plt.subplots(nplot)

	try:
		ax_oe, ax_cal = ax
	except:
		ax_oe = ax

	dbg(e2i([e0, e1]))

	#_plot retrievals from optimal estimation
	#_AIRS_____#
	def fix_oe_crap(d, **kwargs):
		''' get the oe values out of stupid dictionaries '''
		#_grab the first record whose values are not None
		example = array([f is not None for f in d.values])
		example = d[example.argmax()]
		ssp_dbs = example.values.ssp_db
		names = [n.split('.')[-2] for n in ssp_dbs]

		#_build recarray to store these
		nssp = len(ssp_dbs)
		nt = d.size
		dtype = [('tau','f4'), ('lat','f4'), ('lon','f4'), ('epoch', 'f8'),
				('name','a20'), ('habit', 'a20'), ('ref_wvnum', 'f4'),
				('z_top', 'f4'), ('z', 'f4'), ('ref', 'f4')]  
		data = recarray((nssp*nt), dtype)

		#_loop over layers and pull all this crap out
		for i, vals in enumerate(d.values):
			for ii, l in enumerate(vals.layers):
				idx = ii + nssp*i

				habit = vals.ssp_db[l['dbnum']].split('.')[-2]
				arg = (l['tau'][0], d.lat[i], d.lon[i], d.epoch[i], d.name[i],
					habit, l['ref_wvnum'], l['z_top'], l['z'], l['ref'])
				data[idx] = arg

		data = data[data.tau != -9999]		
		return data

	#_pull out oe
	oe = data[data.name == 'oe']
	idx = array([d is not None for d in oe.values])
	oe = oe[idx]
	oe = oe[oe.epoch.argsort()]
	oe = oe[(oe.epoch >= e0) * (oe.epoch <= e1)]

	#_pull out optical depths and put into dictionary by specie
	oe = fix_oe_crap(oe, **kwargs)
	for habit in set(oe.habit):
		if habit == 'mie_wat':
			continue
	
		#_pull out this ssp habit
		oe_habit = oe[oe.habit == habit]
		oe_habit = oe_habit[oe_habit.epoch.argsort()]

		#_pull out the optical depth time series
		x = oe_habit.epoch
		y = oe_habit.tau
		nt = x.size
		max_oe_epoch, min_oe_epoch = x.max(), x.min()
		
		ax_oe.plot(x, y, label=habit, linewidth=0.5)
		xticks = append(x[::nt/5], x[-1])
		if xticks[-1] - xticks[-2] < nt/20:
			xticks[-2] = xticks[-1]
		xticklabels = [tmp[11:19] for tmp in e2i(xticks)]
		ax_oe.set_xticks(xticks)
		ax_oe.set_xticklabels(xticklabels)
		shrink_ticks(ax_oe)

		#_crop plotting area.
		ax_oe.set_xlim(x.min(), x.max())
		ax_oe.set_ylim(0, 3.)

	#_drop box, make smaller
	ax_oe.legend()

	##########
	#_CALIOP_#
	if plot_caliop:
		import numpy as np
	
		#_ pull out caliop data
		caliop = data[data.name == 'caliop']
		idx_c = (caliop.epoch >= e0) * (caliop.epoch <= e1)
		caliop = caliop[(caliop.epoch >= min_oe_epoch) * (caliop.epoch <= max_oe_epoch)]
	##	caliop = caliop[(caliop.epoch >= e0) * (caliop.epoch <= e1)]
		caliop = caliop[caliop.epoch.argsort()]
		x = caliop.epoch

		#_put into ONE BIG OL 2d array 
		caliop = vstack(caliop.values)
		caliop = mw(caliop == -9999, caliop)
		caliop = np.ma.masked_invalid(caliop)
	##	caliop[caliop < 0] = 0.
	##	caliop[caliop > 1.5] = 1.5

		#_coords
		y = arange(caliop.shape[1])

		dbg(('REMOVE TRUNCATION'))
		#_load up backscatter colormaps
		cmap, norm, ticks = ccc.loadcolormap(cmfile, 'caliop')
	##	cb = ax_cal.imshow(caliop.T, cmap=cmap, norm=norm, interpolation='nearest')
		im = ax_cal.pcolormesh(x, y, caliop.T, cmap=cmap, norm=norm)#, vmin=0, vmax=1.5)
		ax_cal.set_xlabel('{0:e}, {1:e}'.format(caliop.max(), caliop.min()))

		#_label bottom
		xticks = append(x[::x.size/5], x[-1])
		if xticks[-1] - xticks[-2] < 100:
			xticks[-2] = xticks[-1]
		xticklabels = [tmp[11:19] for tmp in e2i(xticks)]

		ax_cal.set_xticks(xticks)
		ax_cal.set_xticklabels(xticklabels)
		ax_cal.invert_yaxis()
		ax_cal.get_yaxis().set_visible(False)
		ax_cal.set_xlim(x.min(), x.max())
		shrink_ticks(ax_cal)

	##	cb = fig.colorbar(im, ax=axes, cax=cbaxes, orientation="vertical",
    ##                  extend="both", ticks=ticks, norm=norm,
    ##                  format=SciFormatter())

	##	cb.set_label(name)

	pname = 'oe_{0}_{1}.png'.format(e2i(e0), e2i(e1))
	pname = pname.replace(':', '-')
	dbg(pname)
	plt.savefig(pname)
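The backscatter curtain is cleaned in two passes before plotting: fill values via masked_where, then NaN/Inf via masked_invalid. The same two steps in isolation:

import numpy as np
from numpy.ma import masked_where as mw

curtain = np.array([[0.1, -9999., 0.3], [np.nan, 0.2, 0.4]])
curtain = mw(curtain == -9999., curtain)   #_mask fill values
curtain = np.ma.masked_invalid(curtain)    #_mask NaN/Inf as well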
Example 11
def lbldis_sbatch(file_col,
                  fidx=None,
                  dir_tmp=os.environ['WORK'],
                  dir_batch=os.path.join(os.environ['WORK'], 'batch_scripts'),
                  hostname=False,
                  **kwargs):
    '''
	2016.04.20
	Run LBL-DIS retrievals on the profiles defined

	fidx limits the indices run

	purpose of this level is to generate appropriate sbatch and kwargs option files
	'''
    from libtools import combination, dbg, mkdir_p, capture_stdout
    from pyhdf.SD import SD, SDC
    from pickle import dump
    import re

    #_had to do all this to get the size of the collocated/avg
    #_setup submission command
    scpt = os.path.expanduser('~/lib/run_oe.py')

    #_open collocation file
    hdf = SD(file_col, SDC.READ)
    idx = range(hdf.select('Master_Index_1')[:].size) if fidx is None else fidx

    #_get out_label
    out_label = kwargs.get('out_label')

    #_loop over collocated files, run lblrtm
    for xx, fov_idx in enumerate(idx):

        #_create job label for sbatch
        job_label = '{0}_{1:05d}'.format(file_col.split('/')[-1][:-4], fov_idx)

        #_initialize output for batch script
        out = '#!/bin/bash\n'
        out += '#SBATCH --job-name={0}\n'.format(job_label)
        out += '#SBATCH --partition=all\n'
        out += '#SBATCH --share\n'
        out += '#SBATCH --time=2:00:00\n'
        out += '#SBATCH --ntasks=1\n'
        out += '#SBATCH --cpus-per-task=1\n'
        out += '#SBATCH --nice=1000\n'
        out += '#SBATCH --output=/odyssey/scratch/%u/logs/{0}-%A.txt\n'.format(
            job_label)
        out += 'module purge\n'
        out += 'module load license_intel\n'
        out += 'module load impi\n'
        out += 'module load intel/15.0-2\n'
        out += 'module load hdf5/1.8.14\n'
        out += 'module load netcdf4/4.3.3\n'
        out += 'module load anaconda27/base_2015a_sharppy13\n'
        out += 'export TMPDIR={0}/${{SLURM_JOB_NAME}}.${{SLURM_JOB_ID}}\n'.format(
            dir_tmp)
        out += 'mkdir -p $TMPDIR\n'

        #_add environment variables to script
        out += '\n'.join(['export {0}={1}'.format(var, os.environ[var]) \
                  for var in ['PYTHONPATH', 'PRODUCTS', 'WORK', 'PATH', 'LOG']])

        #_if needing to remote copy, add hostname to front
        args = ('.'.join(file_col.split('/')[-1].split('.')[1:3]), fov_idx,
                out_label)
        kname = 'kwgs.{0}.{1:04d}.{2}.pk'.format(*args)
        file_kwarg = os.path.join(DIR_TMP, kname)
        if hostname:
            file_tmp = '{0}:{1}'.format(hostname, file_col)
        else:
            file_tmp = file_col

        sname = '{0}/sbatch.lbldis.{1}'.format(dir_batch, job_label)
        kwargs.update({'file_col': file_tmp, 'sbatch_file': sname})

        #_DO THIS ONCE WE USE OTHER SENSORS WRS
        #_generate kwarg file that scrpt will load to get settings
        dump(kwargs, open(file_kwarg, 'wb'))
        if hostname:
            file_tmp = '{0}:{1}'.format(hostname, file_kwarg)
        else:
            file_tmp = file_kwarg

        #_build actual script call string
        out += 'source activate my_root\n'
        out += 'echo `which python`\n'
        out += ' '.join((scpt, str(fov_idx), file_tmp)) + '\n'
        out += 'rm -rf $TMPDIR\n'

        #_write batch script
        with open(sname, 'w') as f:
            ##	print out
            f.write(out)
        #_put it all together and submit
        print sname

        #_GET BATCH ID AND PASS TO NEXT AS OK IF PERSISTENCE SET
        ##	cmd = ' '.join(('sbatch', dependency, sname))
        cmd = ' '.join(('sbatch', sname))
        dbg(cmd)
        stdout = capture_stdout(cmd)

        #_get ID
        last_jid = int(
            re.search('Submitted batch job (\d+)', stdout[0]).group(1))
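Both sbatch wrappers end by capturing the job id for a possible dependency chain. The parse itself, assuming sbatch's usual 'Submitted batch job N' reply:

import re

stdout = ['Submitted batch job 123456']  #_as returned by capture_stdout
last_jid = int(re.search(r'Submitted batch job (\d+)', stdout[0]).group(1))
print(last_jid)  #_123456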
Example 12
def lblrtm_sbatch(file_col,
                  fidx=None,
                  experiment='test',
                  dir_tmp=os.environ['WORK'],
                  dir_batch=os.path.join(os.environ['WORK'], 'batch_scripts'),
                  **kwargs):
    '''
	2016.04.20
	Run LBL-RTM on profiles defined

	fidx limits the indices run
	'''
    from libtools import combination, dbg, mkdir_p, capture_stdout
    from pyhdf.SD import SD, SDC
    import re

    #_had to do all this to get the size of the collocated/avg
    #_setup submission command
    scpt = os.path.expanduser('~/lib/run_lblrtm.py')

    #_open collocation file
    hdf = SD(file_col, SDC.READ)
    idx = range(hdf.select('Master_Index_1')[:].size) if fidx is None else fidx

    #_loop over collocated files, run lblrtm
    for xxx, fov_idx in enumerate(idx):

        #_pull out cross and along track indices
        idx_x = hdf.select('Master_Index_1')[fov_idx] - 1
        idx_a = hdf.select('Master_Index_2')[fov_idx] - 1

        #_create job label for sbatch
        job_label = '{0}_{1:04d}_{2:04d}'.format(
            file_col.split('/')[-1][:-4], idx_x, idx_a)

        #_initialize output for batch script
        out = '#!/bin/bash\n'
        out += '#SBATCH --job-name={0}\n'.format(job_label)
        out += '#SBATCH --partition=all\n'
        out += '#SBATCH --share\n'
        out += '#SBATCH --time=2:00:00\n'
        out += '#SBATCH --ntasks=1\n'
        out += '#SBATCH --cpus-per-task=1\n'
        out += '#SBATCH --nice=1000\n'
        out += '#SBATCH --output=/odyssey/scratch/%u/logs/{0}-%A.txt\n'.format(
            job_label)
        out += 'module purge\n'
        out += 'module load license_intel\n'
        out += 'module load impi\n'
        out += 'module load intel/15.0-2\n'
        out += 'module load hdf5/1.8.14\n'
        out += 'module load netcdf4/4.3.3\n'
        out += 'module load anaconda27/base_2015a_sharppy13\n'
        out += 'export TMPDIR={0}/${{SLURM_JOB_NAME}}.${{SLURM_JOB_ID}}\n'.format(
            dir_tmp)
        out += 'mkdir -p $TMPDIR\n'

        #_add environment variables to script
        out += '\n'.join(['export {0}={1}'.format(var, os.environ[var]) \
                  for var in ['PYTHONPATH', 'PRODUCTS', 'WORK', 'PATH', 'LOG']])

        #_DO THIS ONCE WE USE OTHER SENSORS WRS
        '''
		#_generate kwarg file that scrpt will load to get settings
        fmt = 'kwgs.{4}.{2}.{3}.{0:04d}.{1:07d}'
        fmt_args = (i, pid, flight.dtg0, flight.dtg1, out_label)
        file_kwarg = os.path.join(DIR_TMP, fmt.format(*fmt_args))
        pickle.dump(kwargs, open(file_kwarg, 'wb'))	
		'''

        #_build actual script call string
        out += 'source activate my_root\n'
        out += 'echo `which python`\n'
        out += ' '.join(
            (scpt, experiment, str(idx_x), str(idx_a), file_col)) + '\n'
        out += 'rm -rf $TMPDIR\n'

        #_write batch script
        sname = '{0}/sbatch.lblrtm.{1}'.format(dir_batch, job_label)
        with open(sname, 'w') as f:
            f.write(out)
        #_put it all together and submit
        print sname

        #_GET BATCH ID AND PASS TO NEXT AS OK IF PERSISTENCE SET
        cmd = ' '.join(('sbatch', sname))
        dbg(cmd)
        stdout = capture_stdout(cmd)

        #_get ID
        last_jid = int(
            re.search('Submitted batch job (\d+)', stdout[0]).group(1))
Example 13
def main(fidx,
         dir_lblrtm_fmt='',
         dir_lblrtm=None,
         dir_plot='.',
         persistence=False,
         hostname=False,
         file_col=None,
         rerun=False,
         experiment='HS3',
         surf_temp='GDAS',
         **kwargs):
    '''
	completely relies upon the kwargs pickle 
	written by optimal_estimation.real_case()

	fidx	int,	Field of view number associated with
					flight segment file in $PRODUCTS/hs3
	'''

    import matplotlib.pyplot as plt
    from optimal_estimation import optimal_estimation as oe
    from numpy import diag, matrix
    from lblrtm_utils import microwindow_average, microwindow_stdev
    from lblrtm_utils import get_surf_temp
    from libtools import mkdir_p, epoch2dtg, dbg, remote_copy
    from shutil import copytree, rmtree, copy2
    from glob import glob
    from oe_utils import read_airs
    from time import sleep

    try:
        file_col, hostname = remote_copy(file_col)
    except:
        pass

    #_read in radiance data for profile
    idx_x, idx_a, file_airs = get_colloc_crap(fidx, file_col)
    obs, wave = read_airs(file_airs, idx_x, idx_a, **kwargs)  #_WRS
    if len(obs) > 1:
        raise RuntimeError, 'expected a single field of view, got {0}'.format(len(obs))
    obs = obs[0]

    #_update surface temperature value to be used
    if surf_temp == 'GDAS':
        from oe_utils import read_profile_gdas

        gdas = read_profile_gdas(file_airs, idx_x, idx_a, **kwargs)
        surf_temp = gdas.GDAS_sfc_temperature[0]

    kwargs.update({'surf_temp': surf_temp})

    #_pull out radiances
    rads = getattr(obs, '{0}_radiances'.format('AIRS'))  #_WRS
    errs = getattr(obs, '{0}_nen'.format('AIRS'))  #_WRS

    #_get current location
    dtg = epoch2dtg(obs.AIRS_epoch, full=True)  #_WRS

    #_initialize plot
    #	fig, ax = plt.subplots(1, **{'figsize' : (4,1)})
    fig, ax = plt.subplots(1, figsize=(4, 1))
    fig.suptitle(kwargs.get('desc'), size='xx-small')
    out_label = kwargs.get('out_label')
    pname = 'oe_{0}_{1}_f{2:04d}.png'.format(out_label, dtg, fidx)
    pname = os.path.join(dir_plot, 'OE', pname)
    mkdir_p(os.path.join(dir_plot, 'OE'))
    if os.path.exists(pname):
        os.unlink(pname)

    #_build covariance matrix
    #_when using microwindows, average observation data
    dv = kwargs.get('dv')
    if dv < 0:
        std, d = microwindow_stdev(rads, wave, dv)
        nesr, d = microwindow_average(errs, wave, dv, error=True)
        y, wave_obs = microwindow_average(rads, wave, dv)

        #_std deviation within microwindows applied to ref
        # blackbody
        cov = matrix(diag(nesr) + diag(std))

    else:
        cov = matrix(diag(errs))

    #_allow for arbitrary lblrtm directory, get location of LBL-RTM output
    dtgfov = (experiment, dtg)
    dir_lblrtm_arc = dir_lblrtm_fmt.format(*dtgfov) if dir_lblrtm is None \
     else dir_lblrtm

    #_if already run and not desired to redo, kill
    lbldis_output = kwargs.get('lbldis_output')
    file_out = os.path.join(dir_lblrtm_arc, lbldis_output)
    if not rerun and os.path.exists(file_out):
        dbg(('Scene already processed', file_out))
        return

    #_check last field of view for persistence
    if type(persistence) == dict and fidx != 0:
        print 'DONT USE THIS IS PROBABLY WRONG DIRECTORY'
        dtgfov_tmp = (experiment, dtg)
        dir_lblrtm_arc_tmp = dir_lblrtm_fmt.format(*dtgfov_tmp)
        print 'CHECKING PERSIST', kwargs['apriori']

        #_put this in a try clause to allow for failure
        try:
            check_persistence(dir_lblrtm_arc_tmp, persistence, **kwargs)
        except IOError:
            pass
        print 'CHECKING OUTSIST', kwargs['apriori']

    #_copy the archive LBLRTM directory locally and work from there
    dir_lblrtm = os.path.join(DIR_WORK,
                              '/'.join(dir_lblrtm_arc.split('/')[-2:]))
    if not os.path.exists(dir_lblrtm + '/TAPE5'):
        killcount = 0
        while killcount < 10:
            try:
                mkdir_p(dir_lblrtm)
                ##	[ copy2(dir_lblrtm_arc + '/*', dir_lblrtm) for cp in glob(dir_lblrtm_arc + '/*') ]
                [copy2(cp, dir_lblrtm) for cp in glob(dir_lblrtm_arc + '/*')]
                ##	copytree(dir_lblrtm_arc + '/*', dir_lblrtm)
                break
            except:
                #_if fails, try ten times
                sleep(10)
                killcount += 1

            if killcount >= 10:
                exit()

    #_update keyword arguments for this fov
    args = (rads, wave, cov)
    kwargs.update({
        'fig': fig,
        'ax': ax,
        'pname': pname,
        'nproc': 1,  #_just in case I forgot to tone it down
        'dir_lblrtm_arc': dir_lblrtm_arc,
        'dir_lblrtm': dir_lblrtm
    })

    #_pull_the_trigger_#
    #__________________#
    dbg('LAUNCHING...')
    oe(*args, **kwargs)

    #_plot up results with flight data
    kwargs.update({'dir_plot': os.path.join(dir_plot, 'DASHBOARD')})
    dbg('REPLACE PLOTTING PROCEDURE, FILE_COL TOO SHORT, MAKE SEPARATE')
    ##	flight.plot(fidx=fidx, **kwargs)

    for out_file in glob(dir_lblrtm + '/*' + out_label + '*'):
        killcount = 0
        out = os.path.join(dir_lblrtm_arc, out_file.split('/')[-1])

        #_retry a bounded number of times instead of spinning forever
        while not os.path.exists(out) and killcount < 10:
            try:
                dbg(('archiving', out_file, '->', out))
                copy2(out_file, dir_lblrtm_arc)
            except:
                dbg(('failed to copy', out_file, '->', out))
                sleep(10)
                killcount += 1

    #_remove temporary directory
    rmtree(dir_lblrtm)
    #_LBL-RTM, REAL FLIGHT LOOP (must be run before retrieval)
    if 0:  #_GENERATE SUMMARY
        for out_label in out_labels:
            for dtg, values in segments.iteritems():
                namelist['dtg'] = dtg
                final = 'lbldis_output.{0}.final.cdf'.format(out_label)
                clear = 'lbldis_output.{0}.clear.cdf'.format(out_label)
                namelist.update({
                    'lbldis_output': final,
                    'lbldis_clear': clear,
                    'out_label': out_label,
                    'smooth': False
                })
                namelist.update(values)

                dbg('')
                dbg(dtg)
                write_retrieved_qsub(**namelist)
    #	plot_jeff_qsub(**namelist)
    #	plot_real_qsub(**namelist)

    #_PLOT IN ONE
    if 1:
        for dtg, values in segments.iteritems():
            namelist['dtg'] = dtg
            namelist.update({
                'out_labels': out_labels,
                'smooth': False,
                'bt_thresh': 7.,
            })
            namelist.update(values)