Example #1
def read_forecasts( path_fcst=os.environ['PRODUCTS']+'/icap/', label='2012FL',
	sites=None, **kwargs ):
	'''
	Reads merged forecast files produced by ICAP.merge_data.py.
	These are collocated with AERONET sites.
	'''
	from glob import glob
	import libtools as lt
	import libnva   as ln
	import libclass as cl
	import numpy    as np
	from multiprocessing import Queue, Process

	path_fcst = '/'.join((path_fcst, 'seasonal_statistics-'+label, 'points'))
	if sites is None:
		files = glob( path_fcst + '/*-*-'+label+'.nc' ) #_AFTER
	else: 
		files = []
		for site in sites:
			files.extend(glob(path_fcst+'/*-'+site+'-'+label+'.nc'))

	print( path_fcst )
	dtype = [       ('values', cl.var),
	                ('model', 'a16'),
	                ('epoch', 'i8'),
	                ('fhr', 'i4'),
	                ('code', 'a10'),
	                ('lat', 'f4'),
	                ('lon', 'f4'),
	                ('variable', 'a20'),
	                ('ensemble', 'i4'),
	                ('dimname', tuple),
	                ('dimsize', tuple) ]
	records = np.recarray( (0,), dtype=dtype )

	groups = lt.setup_groups( files, **kwargs )
	for group in groups:
		l = len(group)
		q = [None]*l
		t = [None]*l

		#_launch one reader process per file, each with its own result queue
		for i in range(l):
			fname = group[i]
			q[i] = Queue()
			args = (fname,)
			kwargs.update({'pipe':q[i]})
			t[i] = Process( target=ln.read_period_points,
				args=args, kwargs=kwargs )
			t[i].start()

		#_collect each worker's records in launch order and merge them in
		for i in range(l):
			tmp, attrv = q[i].get()
			for rec in tmp:
				rec.values = cl.var( rec.values,
					attrn=['member'], attrv=attrv )
			records = lt.merge([records,tmp])

	ln.check_member_order( records )

	return records
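
#_The per-file read above follows a common Queue-plus-Process fan-out: one
#_worker per file pushes its result onto its own Queue and the parent drains
#_the queues in launch order. A minimal, self-contained sketch of that
#_pattern; read_one and read_group are illustrative stand-ins (read_one plays
#_the role of ln.read_period_points) and are not part of this module.
from multiprocessing import Process, Queue

def read_one(path, pipe=None):
	#_stand-in worker: parse a single file, push the result back to the parent
	pipe.put({'file': path, 'n_records': 0})

def read_group(files):
	queues, procs = [], []
	for path in files:
		q = Queue()
		p = Process(target=read_one, args=(path,), kwargs={'pipe': q})
		p.start()
		queues.append(q)
		procs.append(p)

	#_drain results in launch order, then reap the workers
	results = [q.get() for q in queues]
	for p in procs:
		p.join()
	return results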
Example #2
def plot_forker( recs, **kwargs ):
	''' forks plots based on dtg '''
	dtgs = lt.unique( recs.dtg )
	groups = lt.setup_groups( dtgs, **kwargs )

	#_loop over proc groups
	for group in groups:
		children = []
		for dtg in group:
			pid = os.fork()
			if pid != 0:
				#_parent: track the child pid
				children.append(pid)
			else:
				#_child: pull out day
				day = ln.subset( recs, dtg=dtg )

				#_pass day to plotter
				plot_sat( day, **kwargs )
				os._exit(0)

		for kid in children: os.waitpid(kid,0)
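
#_plot_forker uses the bare os.fork fan-out rather than multiprocessing: the
#_parent collects child PIDs and waits on each, while every child does its
#_work and exits with os._exit so it never falls back into the caller's loop.
#_A minimal, self-contained sketch of the same pattern; fork_map and
#_process_one are illustrative names, not part of this module.
import os

def fork_map(items, process_one):
	''' run process_one(item) in a forked child per item, then reap them '''
	children = []
	for item in items:
		pid = os.fork()
		if pid != 0:
			#_parent: remember the child and keep launching
			children.append(pid)
		else:
			#_child: do the work, then exit without returning to the loop
			process_one(item)
			os._exit(0)

	#_parent: wait on every child before moving on
	for kid in children:
		os.waitpid(kid, 0)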
Example #3
def run_clear_skies(lblrtm=False,
                    lbldis=False,
                    fmt='/data/wsessions/LBL-RTM_hs3/{3}_{0}.{1}_fov{2:06d}',
                    ecmwf_file='/data/wsessions/era-interim/hs3_2013.nc',
                    **kwargs):
    ''' read in fields of view and run cases through lblrtm and lbldis '''
    from clear_sky_fovs import clear_sky as cs
    from hs3_utils import Flight_segment as F
    from qsubmissions import lblrtm_hs3_lim, lbldis_hs3_lim
    from optimal_estimation import clear_sky_radiance as csr
    from libtools import setup_groups
    import os, sys

    if lblrtm and lbldis:
        raise RuntimeError('Submit LBLRTM and LBLDIS separately, not synced')

    #_run lblrtm for all
    if lblrtm:
        for file_seg, opts in cs.items():

            #_check if files exist
            if not os.path.exists(file_seg):
                raise IOError('Missing file {0}'.format(file_seg))

            #_read in flight
            flight = F(file_seg=file_seg)
            kwargs.update({'file_flight': file_seg})
            kwargs.update(opts)

            #_run lblrtm
            lblrtm_hs3_lim(dir_lblrtm_fmt=fmt, **kwargs)

    #_DO EITHER LBLRTM OR LBLDIS, NOT BOTH
    if lbldis:
        experiment = kwargs.get('experiment')
        for file_seg, opts in cs.items():

            #_check if files exist
            if not os.path.exists(file_seg):
                raise IOError('Missing file {0}'.format(file_seg))

            flight = F(file_seg=file_seg)

            #_run lbldis for all
            groups = setup_groups(opts['fidx'], **kwargs)
            for group in groups:
                children = []
                for i in group:
                    pid = os.fork()
                    if pid != 0:
                        children.append(pid)
                    else:
                        surf_temp = get_surf_temp(flight[i], **kwargs)
                        kwargs.update({'surf_temp': surf_temp})

                        #_run lbldis
                        args = (flight.dtg0, flight.dtg1, i, experiment)
                        dir_lblrtm = fmt.format(*args)

                        #_run clear sky case
                        csr(dir_lblrtm, **kwargs)

                        os._exit(0)

                for kid in children:
                    os.waitpid(kid, 0)
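
#_Because the guard above refuses to run LBLRTM and LBLDIS in one call, a
#_driver would invoke run_clear_skies twice: an LBLRTM pass first, then an
#_LBLDIS pass once those runs have finished. A hedged usage sketch; the
#_experiment name and nproc value are placeholders, not taken from this module.
#
#   run_clear_skies(lblrtm=True, experiment='HS3', nproc=4)
#   ...wait for the queued LBLRTM runs to complete, then:
#   run_clear_skies(lbldis=True, experiment='HS3', nproc=4)
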
def fake_shis_sge(vars=['tau'],
                  ranges={'tau': [0]},
                  dir_lblrtm='.',
                  dz=0.2,
                  latitude=0,
                  longitude=0,
                  clddef={},
                  noisey=False,
                  real_shis=None,
                  **kwargs):
    ''' generate fake shis retrieval '''
    from numpy import recarray, array, ndarray, tile
    from libtools import combination as combo
    from lblrtm_utils import write_lbldis_parameters as lbldis_write
    from subprocess import call
    from shis import convolve2shis as c2shis
    from scipy.io import netcdf
    from scipy.interpolate import splev, splrep, interp1d
    from hs3_utils import read_shis
    from numpy.random import random
    from multiprocessing import Process, Queue, Pipe
    from libtools import setup_groups
    from time import sleep

    #_read in real thing for HBB_NESR
    dir_hs3 = kwargs.get('dir_hs3')
    ##	ugh = '{0}/SHIS_rdr20130816T033458end20130816T080506sdr20130817T165537_rad_pcfilt.nc'.format(dir_hs3)
    ##	real_shis = read_shis(ugh, idx=array([0]))
    real_shis = read_shis(real_shis, idx=array([0]))
    hbb = None

    #_lbldis gets persnickety about this
    if 'LD_LIBRARY_PATH' not in os.environ:
        os.environ['LD_LIBRARY_PATH'] = '/opt/netcdf-4.1.2/lib/'

    #_run lbl-rtm with parameters
    combos = combo([ranges[v] for v in vars])

    #_initialize recarray to return
    dtype = [('radiances', ndarray), ('relative_humidity', ndarray),
             ('temperature', ndarray), ('sfc_temperature', 'f4'),
             ('ozone_mixing_ratio', ndarray), ('epoch', 'f8'),
             ('latitude', 'f8'), ('longitude', 'f8'), ('altitude', 'f4')]
    shis = recarray((len(combos),), dtype=dtype)
    setattr(shis, 'fname', 'SIMULATED')

    #_some fake profiles to shove into SHIS
    fk_tmp = tile(-9999, 101)
    fk_rel = tile(-9999, 101)
    fk_ozn = tile(-9999, 101)

    #_add fake float values to some
    shis.sfc_temperature[:] = -9999.
    shis.epoch[:] = -9999
    shis.latitude[:] = latitude
    shis.longitude[:] = longitude
    shis.altitude[:] = 18.

    #_initialize list for metadata
    notes = []


    #_name of script to launch
    dir_log = '/data/wsessions/logs'
    env = ','.join(['='.join((var, os.environ[var])) \
           for var in ['PYTHONPATH', 'PRODUCTS', 'PATH', 'LOG']])
    qsub = ' '.join(('qsub -v', env, '-o', dir_log, '-e', dir_log,
                     '-cwd -S /opt/ShellB3/bin/python'))
    srpt = os.path.expanduser('~/lib/sge_std_dummy_profiles.py')

    #_ THIS COULD BE FORKED
    #_build clddef dict
    for j, opts in enumerate(combos):
        #_get pid
        pid = os.getpid()

        #_launch lbldis
        cloud = [clddef.copy()]
        [cloud[0].update({v: opts[q]}) for q, v in enumerate(vars)]
        cloud[0]['z_top'] = cloud[0]['z'] + dz

        #_build prefix for labeling
        fmt = [
            '{0:s}-{1:05.2f}'.format(v, opts[q]) for q, v in enumerate(vars)
        ]
        notes.append(','.join(fmt))
        fname = '{0:s}/{1:s}.dummy.{2:07d}.namelist'.format(
            dir_lblrtm, '_'.join(fmt), pid)
        oname = '{0:s}/{1:s}.dummy.{2:07d}.output'.format(
            dir_lblrtm, '_'.join(fmt), pid)
        lname = '{0:s}/{1:s}.dummy.{2:07d}.lock'.format(
            dir_lblrtm, '_'.join(fmt), pid)
        #_lname == lockfile name
        with open(lname, 'wb') as f:
            pass

        #_write input parameter file
        lbldis_write(dir_lblrtm, filename=fname, clddef=cloud, **kwargs)

        #_launch
        cmd = ' '.join((qsub, srpt, fname, oname, lname))
        os.system(cmd)

    for j, opts in enumerate(combos):
        #_rebuild the output and lock file names (pid is the parent PID above)
        fmt = [
            '{0:s}-{1:05.2f}'.format(v, opts[q]) for q, v in enumerate(vars)
        ]
        oname = '{0:s}/{1:s}.dummy.{2:07d}.output'.format(
            dir_lblrtm, '_'.join(fmt), pid)
        lname = '{0:s}/{1:s}.dummy.{2:07d}.lock'.format(
            dir_lblrtm, '_'.join(fmt), pid)

        #_wait until the queued job removes its lockfile
        while os.path.exists(lname):
            print('Waiting on {0}'.format(lname))
            sleep(10)

        #_read the lbldis output for this case
        rads, wave = read_lbldis_fork(oname)

        #_convolve to shis profile
        rads = rads / 1000.
        wn, rd = c2shis(wave, rads, **kwargs)
        rd *= 1000.

        #_put into recarray
        shis[j].radiances = rd
        shis[j].relative_humidity = fk_rel
        shis[j].temperature = fk_tmp
        shis[j].ozone_mixing_ratio = fk_ozn

    #_interpolate blackbody reference
    f1d = splrep(real_shis.wavenumber, real_shis.hbb_nesr, k=3)
    hbb = splev(wn, f1d)

    #_if testing with noise, generate
    if noisey:
        from random import gauss
        for j in range(shis.size):

            noise = array([gauss(0, sigma) for sigma in hbb])
            shis[j].radiances = shis[j].radiances.A.squeeze() + noise

    #_add wavenumber to object
    setattr(shis, 'wavenumber', wn)
    setattr(shis, 'pressure', tile(-9999, 101))
    setattr(shis, 'hbb_nesr', hbb)

    #_build string for metadata
    # don't bother making array so write_collocated can remain stable
    notes = '\n'.join(notes)

    #_return
    return shis, notes
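
#_fake_shis_sge coordinates with its queued jobs through lockfiles: the parent
#_touches <prefix>.lock before qsub-ing, the dummy SGE script removes it when
#_the run finishes, and the parent polls until the file disappears. A minimal,
#_self-contained sketch of that handshake; wait_for_locks and the poll
#_interval are assumptions, not part of this module.
def wait_for_locks(lockfiles, interval=10):
    ''' block until every lockfile has been removed by its finished job '''
    import os
    from time import sleep
    pending = list(lockfiles)
    while pending:
        pending = [name for name in pending if os.path.exists(name)]
        if pending:
            sleep(interval)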
def fake_shis(vars=['tau'],
              ranges={'tau': [0]},
              dir_lblrtm='.',
              dz=0.2,
              latitude=0,
              longitude=0,
              clddef={},
              real_shis=None,
              noisey=False,
              **kwargs):
    ''' generate fake shis retrieval '''
    from numpy import recarray, array, ndarray, tile
    from libtools import combination as combo
    from lblrtm_utils import write_lbldis_parameters as lbldis_write
    from subprocess import call
    from shis import convolve2shis as c2shis
    from scipy.io import netcdf
    from scipy.interpolate import splev, splrep, interp1d
    from hs3_utils import read_shis
    from numpy.random import random
    from multiprocessing import Process, Queue, Pipe
    from libtools import setup_groups

    #_read in real thing for HBB_NESR
    dir_hs3 = kwargs.get('dir_hs3')
    ugh = '{0}/{1}'.format(dir_hs3, real_shis)
    real_shis = read_shis(ugh, idx=array([0]))
    hbb = None

    #_lbldis gets persnickety about this
    if 'LD_LIBRARY_PATH' not in os.environ:
        os.environ['LD_LIBRARY_PATH'] = '/opt/netcdf-4.1.2/lib/'

    #_run lbl-rtm with parameters
    combos = combo([ranges[v] for v in vars])

    #_initialize recarray to return
    dtype = [('radiances', ndarray), ('relative_humidity', ndarray),
             ('temperature', ndarray), ('sfc_temperature', 'f4'),
             ('ozone_mixing_ratio', ndarray), ('epoch', 'f8'),
             ('latitude', 'f8'), ('longitude', 'f8'), ('altitude', 'f4')]
    shis = recarray((len(combos),), dtype=dtype)
    setattr(shis, 'fname', 'SIMULATED')

    #_some fake profiles to shove into SHIS
    fk_tmp = tile(-9999, 101)
    fk_rel = tile(-9999, 101)
    fk_ozn = tile(-9999, 101)

    #_add fake float values to some
    shis.sfc_temperature[:] = -9999.
    shis.epoch[:] = -9999
    shis.latitude[:] = latitude
    shis.longitude[:] = longitude
    shis.altitude[:] = 18.

    #_initialize list for metadata
    notes = []

    #_split combinations in processing groups
    groups = setup_groups(combos, **kwargs)

    #_ THIS COULD BE FORKED
    #_build clddef dict
    for i, group in enumerate(groups):
        nv = len(group)
        thread = [None] * nv
        pipe = [None] * nv
        pope = [None] * nv

        for j, opts in enumerate(group):
            #_get pid
            pid = os.getpid()

            #_generate in/out pipe
            pipe[j], pope[j] = Pipe()

            #_launch lbldis
            cloud = [clddef.copy()]
            [cloud[0].update({v: opts[q]}) for q, v in enumerate(vars)]
            cloud[0]['z_top'] = cloud[0]['z'] + dz

            #_build prefix for labeling
            fmt = [
                '{0:s}-{1:05.2f}'.format(v, opts[q])
                for q, v in enumerate(vars)
            ]
            notes.append(','.join(fmt))
            fname = '{0:s}/{1:s}.dummy.{2:07d}.namelist'.format(
                dir_lblrtm, '_'.join(fmt), pid)
            oname = '{0:s}/{1:s}.dummy.{2:07d}.output'.format(
                dir_lblrtm, '_'.join(fmt), pid)

            #_write input parameter file
            lbldis_write(dir_lblrtm, filename=fname, clddef=cloud, **kwargs)

            #_launch process
            args = (fname, oname, pope[j])
            thread[j] = Process(target=run_lbldis_fork, args=args)
            thread[j].start()

        for j, opts in enumerate(group):
            #_global combo index across processing groups (assumes nproc in kwargs)
            k = i * kwargs.get('nproc') + j

            #_get thread
            rads, wave = pipe[j].recv()

            #_convolve to shis profile
            rads = rads / 1000.
            wn, rd = c2shis(wave, rads, **kwargs)
            rd *= 1000.

            #_put into recarray
            shis[k].radiances = rd
            shis[k].relative_humidity = fk_rel
            shis[k].temperature = fk_tmp
            shis[k].ozone_mixing_ratio = fk_ozn

            #_close thread
            thread[j].join()

    #_interpolate blackbody reference
    f1d = splrep(real_shis.wavenumber, real_shis.hbb_nesr, k=3)
    hbb = splev(wn, f1d)

    #_if testing with noise, generate
    if noisey:
        from random import gauss
        for j in range(shis.size):

            noise = array([gauss(0, sigma) for sigma in hbb])
            shis[j].radiances = shis[j].radiances.A.squeeze() + noise

    #_add wavenumber to object
    setattr(shis, 'wavenumber', wn)
    setattr(shis, 'pressure', tile(-9999, 101))
    setattr(shis, 'hbb_nesr', hbb)

    #_build string for metadata
    # don't bother making array so write_collocated can remain stable
    notes = '\n'.join(notes)

    #_return
    return shis, notes
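
#_Unlike the SGE variant, fake_shis hands results back through a
#_multiprocessing Pipe: the parent keeps the receiving end, gives the sending
#_end to the child, and recv() blocks until the child has pushed its
#_(radiance, wavenumber) pair. A stripped-down, self-contained sketch of that
#_exchange; run_case is a stand-in for run_lbldis_fork, not part of this module.
from multiprocessing import Process, Pipe

def run_case(case, conn):
    #_child side: do the work and push the result through the pipe
    conn.send(('radiances-for-{0}'.format(case), 'wavenumbers'))
    conn.close()

def run_group(cases):
    pipes, procs = [], []
    for case in cases:
        recv_end, send_end = Pipe()
        p = Process(target=run_case, args=(case, send_end))
        p.start()
        pipes.append(recv_end)
        procs.append(p)

    #_collect results in launch order, then join the workers
    results = [conn.recv() for conn in pipes]
    for p in procs:
        p.join()
    return results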