Example #1
0
def makechi(filename):
    key = int(filename.split('/')[-1].split('_')[-1].split('.')[0])
    print "chi_condensed_profile", key

    condensedfile = Dataset('cdf/condensed_profile_%08d.nc' % key)
    envfile = Dataset('cdf/condensed_env_profile_%08d.nc' % key)
    shellfile = Dataset('cdf/condensed_shell_profile_%08d.nc' % key)
    statfile = Dataset(mc.get_stat())
 
    t = numpy.atleast_1d(condensedfile.variables['ids'][:])

    cloud_duration = len(t)
    n = len(t)

    z = envfile.variables['z'][:]
    p = statfile.variables['PRES'][0,:]*100.

    area_condensed = numpy.atleast_2d(condensedfile.variables['AREA'][:])

    thetal_condensed = numpy.atleast_2d(condensedfile.variables['THETAL'][:])
    qt_condensed = numpy.atleast_2d(condensedfile.variables['QT'][:])
    ql_condensed = numpy.atleast_2d(condensedfile.variables['QN'][:])
    T_condensed = numpy.atleast_2d(condensedfile.variables['TABS'][:])

    thetal_env = numpy.atleast_2d(envfile.variables['THETAL'][:]) 
    qt_env = numpy.atleast_2d(envfile.variables['QT'][:]) 
    thetal_shell = numpy.atleast_2d(shellfile.variables['THETAL'][:])
    qt_shell = numpy.atleast_2d(shellfile.variables['QT'][:])
    
    mask = ~(area_condensed > 0.)
    
    thetal_condensed_mask = numpy.ma.array(thetal_condensed, mask=mask)
    qt_condensed_mask = numpy.ma.array(qt_condensed, mask=mask)
    ql_condensed_mask = numpy.ma.array(ql_condensed, mask=mask)
    T_condensed_mask = numpy.ma.array(T_condensed, mask=mask)

    thetal_env_mask = numpy.ma.array(thetal_env, mask=mask) 
    qt_env_mask = numpy.ma.array(qt_env, mask=mask) 
    thetal_shell_mask = numpy.ma.array(thetal_shell, mask=mask)
    qt_shell_mask = numpy.ma.array(qt_shell, mask=mask)

    chi = SAM.find_chi_ql(ql_condensed, 
                          thetal_condensed, thetal_env, 
                          qt_condensed, qt_env, 
                          T_condensed, p)
                          
    chi_mean = SAM.find_chi_ql(ql_condensed_mask.mean(0),
                               thetal_condensed_mask.mean(0), 
                               thetal_env_mask.mean(0), 
                               qt_condensed_mask.mean(0), 
                               qt_env_mask.mean(0), 
                               T_condensed_mask.mean(0), 
                               p)

    chi_mean_condensed = SAM.find_chi_ql(ql_condensed_mask.mean(0),
                                  thetal_condensed_mask.mean(0), 
                                  thetal_env, 
                                  qt_condensed_mask.mean(0), qt_env, 
                                  T_condensed_mask.mean(0), p)

    chi_mean_env = SAM.find_chi_ql(ql_condensed,
                                  thetal_condensed, thetal_env_mask.mean(0), 
                                  qt_condensed, qt_env_mask.mean(0), 
                                  T_condensed, p)

#    chi[isnan(chi)] = 0.

    savefile = Dataset('cdf/condensed_chi_profile_%08d.nc' % key, 'w', format='NETCDF3_64BIT')
    savefile.createDimension('id', cloud_duration)
    savefile.createDimension('z', len(z))
    var_t = savefile.createVariable('id', 'd', ('id',))
    var_z = savefile.createVariable('z', 'd', ('z',))
    var_chi = savefile.createVariable('chi', 'd', ('id', 'z'))
    var_chi_mean = savefile.createVariable('chi_mean', 'd', ('id', 'z'))
    var_chi_mean_env = savefile.createVariable('chi_mean_env', 'd', ('id', 'z'))
    var_chi_mean_condensed = savefile.createVariable('chi_mean_condensed', 'd', ('id', 'z'))

    var_t[:] = t
    var_z[:] = z
    var_chi[:] = chi
    var_chi_mean[:] = chi_mean*(~mask)
    var_chi_mean_condensed[:] = chi_mean_condensed*(~mask)
    var_chi_mean_env[:] = chi_mean_env*(~mask)


    condensedfile.close()
    envfile.close()
    shellfile.close()
    statfile.close()

    savefile.close()
def main():
    sample_types = ('CONDENSED', 'EDGE', 'SHELL', 'ENV',)

    stats_dict = {}

    for l in range(mc.nt):
        print l
        cluster_dict = {}          
        nc_files = {}
        nc_files['CONDENSED'] = Dataset('../time_profiles/cdf/condensed_profile_%08d.nc' % l)
        nc_files['PLUME'] = Dataset('../time_profiles/cdf/plume_profile_%08d.nc' % l)    	
        area = nc_files['CONDENSED'].variables['AREA'][:]
        mask = (area > 0.)
        area[~mask] = 0.
        cluster_dict['AREA'] = area[mask]
        
        if mask.sum() == 0:
            nc_files['CONDENSED'].close()
            stats_dict[l] = {}
            continue

        mask_top = mask.copy()
        mask_top[:, 1:-1] = mask[:, 1:-1] & ~mask[:, 2:] & mask[:, :-2]
        mask_bottom = mask.copy()
        mask_bottom[:, 1:-1] = mask[:, 1:-1] & mask[:, 2:] & ~mask[:, :-2]
        nc_files['EDGE'] = Dataset('../time_profiles/cdf/condensed_edge_profile_%08d.nc' % l)
        nc_files['SHELL'] = Dataset('../time_profiles/cdf/condensed_shell_profile_%08d.nc' % l)
        nc_files['ENV'] = Dataset('../time_profiles/cdf/condensed_env_profile_%08d.nc' % l)
        entrain_file = Dataset('../time_profiles/cdf/condensed_entrain_profile_%08d.nc' % l)
        surface_file = Dataset('../time_profiles/cdf/surface_profile_%08d.nc' % l)
        condensed_shell_file = Dataset('../time_profiles/cdf/condensed_shell_profile_%08d.nc' % l)
        chi_file = Dataset('../time_profiles/cdf/condensed_chi_profile_%08d.nc' % l)
        stat_file = Dataset(mc.get_stat())
       
        z = nc_files['CONDENSED'].variables['z'][:]
        z = np.resize(z, mask.shape)
        cluster_dict['z'] = z[mask]

        # Calculate and store cloud thickness for each sample
        # Use maked arrays to preserve axes; if z_min == z_max, thickness = dz
        masked_z = np.ma.masked_where(area==0., z)
        depth = np.ones_like(z)*(masked_z.max(axis=1) - 
            masked_z.min(axis=1))[:, np.newaxis] + mc.dz
        cluster_dict['depth'] = depth[mask]

        # Calculate and store condensed shell relative humidity
        r = condensed_shell_file.variables['QV'][:]
        p = condensed_shell_file.variables['PRES'][:]
        T = condensed_shell_file.variables['TABS'][:]
        relh = thermo.e(r, p)/thermo.e_star(T)
        cluster_dict['RELH_COND_SHELL'] = relh[mask]

        z = z*mask      
        zmax = np.ones_like(mask)*(z.max(1))[:, np.newaxis]        
        z[~mask] = 1e8
        zmin = np.ones_like(mask)*(z.min(1))[:, np.newaxis]
        cluster_dict['z_scaled'] = ((z - zmin.min())/(zmax-zmin.min()))[mask]

        rho = nc_files['CONDENSED'].variables['RHO'][:]
        cluster_dict['RHO'] = rho[mask]
        
        mf = rho*area*nc_files['CONDENSED'].variables['W'][:]
        cluster_dict['MF'] = mf[mask]

        for var in ('W', 'QT', 'THETAV', 'THETAL', 'QN'):
            for type in sample_types:
                temp = nc_files[type].variables[var][:]
        
                cluster_dict[var + '_' + type] = temp[mask]
                if var != 'W':
                    temp = stat_file.variables[var][:]
                    if var == 'QT': 
                        temp = temp/1000.
                    temp2 = nc_files['CONDENSED'].variables[var][:] - temp[l, :]
                    cluster_dict[var + '_' + type + '-MEAN'] = temp2[mask]
                                
            cluster_dict[var + '_CONDENSED-ENV'] = cluster_dict[var + '_CONDENSED'] - cluster_dict[var + '_ENV']
            cluster_dict[var + '_CONDENSED-SHELL'] = cluster_dict[var + '_CONDENSED'] - cluster_dict[var + '_SHELL']

        tv = stat_file.variables['THETAV'][l, :]
        tv[1:-1] = (tv[2:]-tv[:-2])/mc.dz/2.
        tv = tv*ones_like(temp)
        cluster_dict['dTHETAV_dz_MEAN'] = tv[mask]

        for var in ('DWDZ', 'DPDZ', 'THETAV_LAPSE'):
            temp = nc_files['CONDENSED'].variables[var][:]
            cluster_dict[var + '_CONDENSED'] = temp[mask]
            
        chi = chi_file.variables['chi'][:]
        # chi_shell = chi_file.variables['chi_shell'][:]
        # chi_se = chi_file.variables['chi_se'][:]

        cluster_dict['CHI'] = chi[mask]
        # cluster_dict['CHI_SHELL'] = chi_shell[mask]

        surface = surface_file.variables['CONDENSED_SURFACE'][:]
        cluster_dict['SURFACE'] = surface[mask]

        lsmf = stat_file.variables['MFTETCLD'][l, :]
        lsrhoa = stat_file.variables['RHO'][l, :]*stat_file.variables['VTETCLD'][l,:]

        E = entrain_file.variables['ETETCLD'][:]
        D = entrain_file.variables['DTETCLD'][:]
        massflux = entrain_file.variables['MFTETCLD'][:]
        volume = entrain_file.variables['VTETCLD'][:]
        epsilon = E/massflux
        delta = D/massflux
        wepsilon = E/rho/volume
        wdelta = D/rho/volume
               
        cluster_dict['E'] = E[mask]
        cluster_dict['D'] = D[mask]
        cluster_dict['EPSILON'] = epsilon[mask]
        cluster_dict['DELTA'] = delta[mask]
        cluster_dict['EPSILON_LS'] = (E/lsmf)[mask]
        cluster_dict['DELTA_LS'] = (D/lsmf)[mask]
        cluster_dict['WEPSILON'] = wepsilon[mask]
        cluster_dict['WDELTA'] = wdelta[mask]
        cluster_dict['WEPSILON_LS'] = (E/lsrhoa)[mask]
        cluster_dict['WDELTA_LS'] = (D/lsrhoa)[mask]

        for var in (('MF', mf), ('AREA', area)):
            temp = var[1]
            temp_result = (temp[:, 2:] - temp[:, :-2])/mc.dz/2.
            temp_top = (temp[:, 2:] - temp[:, 1:-1])/mc.dz
            temp_bottom = (temp[:, 1:-1] - temp[:, :-2])/mc.dz
            temp_result[mask_top] = temp_top[mask_top]
            temp_result[mask_bottom] = temp_bottom[mask_bottom]
            cluster_dict['d_' + var[0] + '_dz'] = temp_result[mask]

        cluster_dict['TIME'] = ones_like(z[mask])*l*mc.dt
        
        for item in cluster_dict:
            if item in stats_dict:
                stats_dict[item].append(cluster_dict[item])
            else:
                stats_dict[item] = [cluster_dict[item]]

        for type in sample_types:
            nc_files[type].close()
        entrain_file.close()
        chi_file.close()
        
    for item in stats_dict:
        stats_dict[item] = np.hstack(stats_dict[item])

    pickle.dump(stats_dict, open('pkl/condensed_time_stats.pkl', 'wb'))
#!/usr/bin/env python
#Runtime (690, 130, 128, 128): 3 hours 40 minutes

from pylab import *
import numpy
import cPickle
from netCDF4 import Dataset
import os
from ent_analysis.lib.thermo import SAM
import ent_analysis.lib.model_param as mc

# Load the mean cloud field statistics once at module import; all profile
# calculations share these reference profiles.
stat_file = Dataset(mc.get_stat())
# z: model levels; RHO/PRES: density and pressure profiles at the first
# output time.  PRES is scaled by 100 (presumably hPa -> Pa -- confirm
# against the stat file's units attribute).
data = {'z': stat_file.variables['z'][:].astype(double),
    'RHO' : stat_file.variables['RHO'][0,:].astype(double),
    'PRES' : stat_file.variables['PRES'][0,:].astype(double)*100.}
stat_file.close()

def create_savefile(t, data, vars, profile_name):
    """Create cdf/<profile_name>_profile_<t>.nc and write its coordinate
    variables ('ids' and 'z') from *data*.

    NOTE(review): callers unpack ``savefile, variables = create_savefile(...)``,
    so the per-variable creation and the return statement appear to lie
    beyond this view -- the visible body only writes the coordinates.
    """
    ids = data['ids'][:]
    z = data['z'][:]
    savefile = Dataset('cdf/%s_profile_%08d.nc' % (profile_name, t), 'w')
    
    # Create savefile
    savefile.createDimension('ids', len(ids))
    savefile.createDimension('z', len(z))

    tsavevar = savefile.createVariable('ids', 'd', ('ids',))
    tsavevar[:] = ids[:]
    zsavevar = savefile.createVariable('z', 'd', ('z',))
    zsavevar[:] = z[:]
Example #4
0
def main():
    sample_types = ('CORE', 'ENV', 'PLUME')

    stats_dict = {}

    for l in range(mc.nt):
        print l
        cluster_dict = {}
        
        nc_files = {}
        nc_files['CORE'] = Dataset('../time_profiles/cdf/core_profile_%08d.nc' % l)
        nc_files['PLUME'] = Dataset('../time_profiles/cdf/plume_profile_%08d.nc' % l)	
        area = nc_files['CORE'].variables['AREA'][:]
        mask = (area > 0.)
        area[~mask] = 0.
        cluster_dict['AREA'] = area[mask]
        
        if mask.sum() == 0:
            nc_files['CORE'].close()
            nc_files['PLUME'].close()
            continue

        nc_files['EDGE'] = Dataset('../time_profiles/cdf/core_edge_profile_%08d.nc' % l)
        nc_files['SHELL'] = Dataset('../time_profiles/cdf/core_shell_profile_%08d.nc' % l)
        nc_files['ENV'] = Dataset('../time_profiles/cdf/core_env_profile_%08d.nc' % l)
        entrain_file = Dataset('../time_profiles/cdf/core_entrain_profile_%08d.nc' % l)
        surface_file = Dataset('../time_profiles/cdf/surface_profile_%08d.nc' % l)
        condensed_shell_file = Dataset('../time_profiles/cdf/condensed_shell_profile_%08d.nc' % l)
        chi_file = Dataset('../time_profiles/cdf/core_chi_profile_%08d.nc' % l)
        stat_file = Dataset(mc.get_stat())

        z = nc_files['CORE'].variables['z'][:]
        z = np.resize(z, mask.shape)
        cluster_dict['z'] = z[mask]
        
        # Calculate and store cloud thickness for each sample
        # Use maked arrays to preserve axes; if z_min == z_max, thickness = dz
        masked_z = np.ma.masked_where(area==0., z)
        depth = np.ones_like(z)*(masked_z.max(axis=1) - 
            masked_z.min(axis=1))[:, np.newaxis] + mc.dz
        cluster_dict['depth'] = depth[mask]
        
        # Calculate and store condensed shell relative humidity
        r = condensed_shell_file.variables['QV'][:]
        p = condensed_shell_file.variables['PRES'][:]
        T = condensed_shell_file.variables['TABS'][:]
        relh = thermo.e(r, p)/thermo.e_star(T)
        cluster_dict['RELH_COND_SHELL'] = relh[mask]

        stat_core = stat_file.variables['COR'][l, :]
        if (stat_core > 0.).any():
            k_cb = np.nonzero(stat_core > 0.)[0].min()
        else:
            k_cb = np.nonzero(area)[1].min()
            
        z_cb = ones_like(z)*z[0, k_cb]
        cluster_dict['z_cb'] = z_cb[mask]
        
        z = z*mask
        zmax = np.ones_like(mask)*(z.max(1))[:, np.newaxis]        
        z[~mask] = 1e10
        zmin = np.ones_like(mask)*(z.min(1))[:, np.newaxis]
        cluster_dict['z_scaled'] = ((z - zmin.min())/(zmax-zmin.min()))[mask]

        rho = nc_files['CORE'].variables['RHO'][:]
        cluster_dict['RHO'] = rho[mask]
        
        mf = rho*area*nc_files['CORE'].variables['W'][:]
        cluster_dict['MF'] = mf[mask]

        for var in ('W', 'QT', 'THETAV', 'THETAL', 'QN'):
            for type in sample_types:
                temp = nc_files[type].variables[var][:]
        
                cluster_dict[var + '_' + type] = temp[mask]
                if var != 'W':
                    temp = stat_file.variables[var][:]
                    if var == 'QT': 
                        temp = temp/1000.
                    temp2 = nc_files['CORE'].variables[var][:] - temp[l, :]
                    cluster_dict[var + '_' + type + '-MEAN'] = temp2[mask]
                                
            cluster_dict[var + '_CORE-ENV'] = cluster_dict[var + '_CORE'] - cluster_dict[var + '_ENV']
            # cluster_dict[var + '_CORE-SHELL'] = cluster_dict[var + '_CORE'] - cluster_dict[var + '_SHELL']

        qsat = SAM.qsatw(nc_files['CORE'].variables['TABS'][:], 
                         nc_files['CORE'].variables['PRES'][:])
        cluster_dict['QSAT_CORE'] = qsat[mask]

        qsat_cb = qsat[:, k_cb]
        qsat_cb = ones_like(qsat)*qsat_cb[:, np.newaxis]
        cluster_dict['QSAT_CB'] = qsat_cb[mask]

        tv = stat_file.variables['THETAV'][l, :]
        tv[1:-1] = (tv[2:]-tv[:-2])/mc.dz/2.
        tv = tv*ones_like(temp)
        cluster_dict['dTHETAV_dz_MEAN'] = tv[mask]

        chi = chi_file.variables['chi_theta'][:]
        cluster_dict['CHI'] = chi[mask]

        surface = surface_file.variables['CORE_SURFACE'][:]
        cluster_dict['SURFACE'] = surface[mask]

        lsmf = stat_file.variables['MFTETCOR'][l, :]
        lsrhoa = stat_file.variables['RHO'][l, :]*stat_file.variables['VTETCOR'][l,:]
        
        qc = stat_file.variables['QTCOR'][l, :]/1000.
        qe = stat_file.variables['QTCEN'][l, :]/1000.
        tc = stat_file.variables['TLCOR'][l, :]
        te = stat_file.variables['TLCEN'][l, :]
        wc = stat_file.variables['WCOR'][l, :]
        we = stat_file.variables['WCEN'][l, :]
        
        dwdt = entrain_file.variables['DWDT'][:]
        E = entrain_file.variables['ETETCOR'][:]
        D = entrain_file.variables['DTETCOR'][:]
        Eq = entrain_file.variables['EQTETCOR'][:]
        Dq = entrain_file.variables['DQTETCOR'][:]
        Et = entrain_file.variables['ETTETCOR'][:]
        Dt = entrain_file.variables['DTTETCOR'][:]
        Ew = entrain_file.variables['EWTETCOR'][:]
        Dw = entrain_file.variables['DWTETCOR'][:]
        
        massflux = entrain_file.variables['MFTETCOR'][:]
        volume = entrain_file.variables['VTETCOR'][:]
               
        cluster_dict['DWDT'] = dwdt[mask]
        cluster_dict['E'] = E[mask]
        cluster_dict['D'] = D[mask]
        cluster_dict['EQ'] = Eq[mask]
        cluster_dict['DQ'] = Dq[mask]
        cluster_dict['ET'] = Et[mask]
        cluster_dict['DT'] = Dt[mask]
        cluster_dict['EW'] = Ew[mask]
        cluster_dict['DW'] = Dw[mask]
        
        Aq = ((Eq/E) - qe)/(qc - qe)
        Bq = (qc - (Dq/D))/(qc - qe)
        At = ((Et/E) - te)/(tc - te)
        Bt = (tc - (Dt/D))/(tc - te)
        Aw = ((Ew/E) - we)/(wc - we)
        Bw = (wc -(Dw/D))/(wc - we)
        
        cluster_dict['AQ'] = Aq[mask]
        cluster_dict['BQ'] = Bq[mask]
        cluster_dict['AT'] = At[mask]
        cluster_dict['BT'] = Bt[mask]
        cluster_dict['AW'] = Aw[mask]
        cluster_dict['BW'] = Bw[mask]
        
        Eq_T = ((qc*(E-D) - (Eq-Dq))/(qc-qe))
        Dq_T = ((qe*(E-D) - (Eq-Dq))/(qc-qe))
        Et_T = ((tc*(E-D) - (Et-Dt))/(tc-te))
        Dt_T = ((te*(E-D) - (Et-Dt))/(tc-te))
        Ew_T = ((wc*(E-D) - (Ew-Dw))/(wc-we))
        Dw_T = ((we*(E-D) - (Ew-Dw))/(wc-we))
        
        cluster_dict['EQ_T'] = Eq_T[mask]
        cluster_dict['DQ_T'] = Dq_T[mask]
        cluster_dict['ET_T'] = Et_T[mask]
        cluster_dict['DT_T'] = Dt_T[mask]
        cluster_dict['EW_T'] = Ew_T[mask]
        cluster_dict['DW_T'] = Dw_T[mask]
        
        cluster_dict['EPSILON'] = (E/massflux)[mask]
        cluster_dict['Q_EPSILON'] = (Eq/massflux)[mask]
        cluster_dict['T_EPSILON'] = (Et/massflux)[mask]
        cluster_dict['W_EPSILON'] = (Ew/massflux)[mask]
        cluster_dict['Q_EPSILON_T'] = (Eq_T/massflux)[mask]
        cluster_dict['T_EPSILON_T'] = (Et_T/massflux)[mask]
        cluster_dict['W_EPSILON_T'] = (Ew_T/massflux)[mask]
        
        cluster_dict['DELTA'] = (D/massflux)[mask]
        cluster_dict['Q_DELTA'] = (Dq/massflux)[mask]
        cluster_dict['T_DELTA'] = (Dt/massflux)[mask]
        cluster_dict['W_DELTA'] = (Dw/massflux)[mask]
        cluster_dict['Q_DELTA_T'] = (Dq_T/massflux)[mask]
        cluster_dict['T_DELTA_T'] = (Dt_T/massflux)[mask]
        cluster_dict['W_DELTA_T'] = (Dw_T/massflux)[mask]

        for var in ('DWDZ', 'DPDZ', 'THETAV_LAPSE'):
            temp = nc_files['CORE'].variables[var][:]
            cluster_dict[var + '_CORE'] = temp[mask]            

        ww_reyn = nc_files['CORE'].variables['WWREYN'][:]
        wq_reyn = nc_files['CORE'].variables['WQREYN'][:]
        
        ww_reyn = ww_reyn*rho*area
        wq_reyn = wq_reyn*rho*area
        
        ww_reyn[~mask] = 0.
        wq_reyn[~mask] = 0.

        qt_core = nc_files['CORE'].variables['QT'][:]
        qt_core[~mask] = 0.

        mask_top = mask.copy()
        mask_top[:, 1:-1] = mask[:, 1:-1] & ~mask[:, 2:] & mask[:, :-2]
        mask_bottom = mask.copy()
        mask_bottom[:, 1:-1] = mask[:, 1:-1] & mask[:, 2:] & ~mask[:, :-2]

        for var in (('MF', mf), 
                    ('AREA', area), 
                    ('WW', ww_reyn), 
                    ('WQ', wq_reyn),
                    ('Q', qt_core),):
            temp = var[1]
            temp_result = (temp[:, 2:] - temp[:, :-2])/mc.dz/2.
            temp_top = (temp[:, 2:] - temp[:, 1:-1])/mc.dz
            temp_bottom = (temp[:, 1:-1] - temp[:, :-2])/mc.dz
            temp_result[mask_top] = temp_top[mask_top]
            temp_result[mask_bottom] = temp_bottom[mask_bottom]
            cluster_dict['D' + var[0] + 'DZ_CORE'] = temp_result[mask]

        cluster_dict['TIME'] = ones_like(z[mask])*l*mc.dt
        ids = nc_files['CORE'].variables['ids'][:]
        cluster_dict['ID'] = (ones_like(z)*ids[:, np.newaxis])[mask]
        
        for item in cluster_dict:
            if item in stats_dict:
                stats_dict[item].append(cluster_dict[item])
            else:
                stats_dict[item] = [cluster_dict[item]]

        for type in sample_types:
            nc_files[type].close()
        entrain_file.close()
        chi_file.close()
        
    for item in stats_dict:
        stats_dict[item] = np.hstack(stats_dict[item])

    pickle.dump(stats_dict, open('pkl/core_time_stats.pkl', 'wb'))
def main(filename):
    """Build condensed-region entrainment/detrainment profiles for the
    timestep encoded in *filename* and save them to a per-time NetCDF file.

    filename: model output file whose name encodes the timestep (decoded
        by mc.time_picker).
    """
    # Map each output variable name to the routine that computes it.
    vars = {
          'ETETCLD': var_calcs.etetcld,
          'DTETCLD': var_calcs.dtetcld,
          'EQTETCLD': var_calcs.eqtetcld,
          'DQTETCLD': var_calcs.dqtetcld,
          'ETTETCLD': var_calcs.ettetcld,
          'DTTETCLD': var_calcs.dttetcld,
          'EWTETCLD': var_calcs.ewtetcld,
          'DWTETCLD': var_calcs.dwtetcld,
          'VTETCLD': var_calcs.vtetcld,
          'MFTETCLD': var_calcs.mftetcld,
    }
    
    # Automatically load time step from output file name
    time = mc.time_picker(filename)
    
    # Load CDF Files
    nc_file = Dataset(filename)
    stat_file = Dataset(mc.get_stat())

    # Reference profiles at this timestep; PRES scaled by 100 (presumably
    # hPa -> Pa -- confirm against the stat file's units).
    data = {'z': stat_file.variables['z'][:].astype(double),
            'RHO' : stat_file.variables['RHO'][time,:].astype(double),
            'PRES' : stat_file.variables['PRES'][time,:].astype(double)*100.}
    stat_file.close()

    # For each cloud, iterate over all times
    cloud_filename = '../cloudtracker/pkl/cloud_data_%08d.pkl' % time
    # Load the cloud data at that timestep
    clouds = cPickle.load(open(cloud_filename, 'rb'))
        
    # Process clouds in ascending id order (Python 2: keys() is a list).
    ids = clouds.keys()
    ids.sort()
        
    data['ids'] = numpy.array(ids)
    # Pull the tetrahedral entrainment fields for this single output time.
    for name in ('ETETCLD', 'DTETCLD',
                 'EQTETCLD', 'DQTETCLD',
                 'ETTETCLD', 'DTTETCLD',
                 'EWTETCLD', 'DWTETCLD',
                 'VTETCLD', 'MFTETCLD'):
        data[name] = nc_file.variables[name][0, :].astype(numpy.double)
                
    # For each cloud, create a savefile for each profile
    savefiles = {}
    profiles = {}
    for item in ('condensed_entrain',):
        savefile, variables = create_savefile(time, data, vars, item)
        savefiles[item] = savefile
        profiles[item] = variables
        
    for n, id in enumerate(ids):
        print "time: ", time, " id: ", id
        # Select the current cloud id
        cloud = clouds[id]
        # The entrainment region is the condensed core plus its shell.
        cloud['condensed_entrain'] = numpy.hstack([cloud['condensed'], cloud['condensed_shell']])

        make_profiles(profiles, cloud, vars, data, n)
            
    for savefile in savefiles.values():
        savefile.close()

    nc_file.close()
Example #6
0
def makechi(filename):
    key = int(filename.split("/")[-1].split("_")[-1].split(".")[0])
    print "chi_core_profile", key

    corefile = Dataset("cdf/core_profile_%08d.nc" % key)
    envfile = Dataset("cdf/core_env_profile_%08d.nc" % key)
    shellfile = Dataset("cdf/core_shell_profile_%08d.nc" % key)
    statfile = Dataset(mc.get_stat())

    t = numpy.atleast_1d(corefile.variables["ids"][:])

    cloud_duration = len(t)
    n = len(t)

    z = envfile.variables["z"][:]
    p = statfile.variables["PRES"][0, :] * 100.0
    thetav_mean = statfile.variables["THETAV"][int(key), :]

    area_core = numpy.atleast_2d(corefile.variables["AREA"][:])

    thetal_core = numpy.atleast_2d(corefile.variables["THETAL"][:])
    thetav_core = numpy.atleast_2d(corefile.variables["THETAV"][:])
    qt_core = numpy.atleast_2d(corefile.variables["QT"][:])
    ql_core = numpy.atleast_2d(corefile.variables["QN"][:])
    T_core = numpy.atleast_2d(corefile.variables["TABS"][:])
    p = numpy.atleast_2d(corefile.variables["PRES"][:])

    thetal_env = numpy.atleast_2d(envfile.variables["THETAL"][:])
    qt_env = numpy.atleast_2d(envfile.variables["QT"][:])

    thetal_shell = numpy.atleast_2d(shellfile.variables["THETAL"][:])
    qt_shell = numpy.atleast_2d(shellfile.variables["QT"][:])

    mask = ~(area_core > 0.0)

    thetal_core_mask = numpy.ma.array(thetal_core, mask=mask)
    thetav_core_mask = numpy.ma.array(thetav_core, mask=mask)
    qt_core_mask = numpy.ma.array(qt_core, mask=mask)
    ql_core_mask = numpy.ma.array(ql_core, mask=mask)
    T_core_mask = numpy.ma.array(T_core, mask=mask)

    thetal_env_mask = numpy.ma.array(thetal_env, mask=mask)
    qt_env_mask = numpy.ma.array(qt_env, mask=mask)

    thetal_shell_mask = numpy.ma.array(thetal_shell, mask=mask)
    qt_shell_mask = numpy.ma.array(qt_shell, mask=mask)

    chi_theta = SAM.find_chi_theta(thetal_core, thetal_env, thetav_core, thetav_mean, qt_core, qt_env, T_core, p)

    chi_theta_mean = SAM.find_chi_theta(
        thetal_core_mask.mean(0),
        thetal_env_mask.mean(0),
        thetav_core_mask.mean(0),
        thetav_mean,
        qt_core_mask.mean(0),
        qt_env_mask.mean(0),
        T_core_mask.mean(0),
        p,
    )
    chi_theta_mean_core = SAM.find_chi_theta(
        thetal_core_mask.mean(0),
        thetal_env,
        thetav_core_mask.mean(0),
        thetav_mean,
        qt_core_mask.mean(0),
        qt_env,
        T_core_mask.mean(0),
        p,
    )
    chi_theta_mean_env = SAM.find_chi_theta(
        thetal_core, thetal_env_mask.mean(0), thetav_core, thetav_mean, qt_core, qt_env_mask.mean(0), T_core, p
    )

    chi_ql = SAM.find_chi_ql(ql_core, thetal_core, thetal_env, qt_core, qt_env, T_core, p)
    chi_ql_mean = SAM.find_chi_ql(
        ql_core_mask.mean(0),
        thetal_core_mask.mean(0),
        thetal_env_mask.mean(0),
        qt_core_mask.mean(0),
        qt_env_mask.mean(0),
        T_core_mask.mean(0),
        p,
    )
    chi_ql_mean_core = SAM.find_chi_ql(
        ql_core_mask.mean(0), thetal_core_mask.mean(0), thetal_env, qt_core_mask.mean(0), qt_env, T_core_mask.mean(0), p
    )
    chi_ql_mean_env = SAM.find_chi_ql(
        ql_core, thetal_core, thetal_env_mask.mean(0), qt_core, qt_env_mask.mean(0), T_core, p
    )

    #    chi[isnan(chi)] = 0.

    savefile = Dataset("cdf/core_chi_profile_%08d.nc" % key, "w", format="NETCDF3_64BIT")
    savefile.createDimension("id", cloud_duration)
    savefile.createDimension("z", len(z))
    var_t = savefile.createVariable("id", "d", ("id",))
    var_z = savefile.createVariable("z", "d", ("z",))
    var_chi_theta = savefile.createVariable("chi_theta", "d", ("id", "z"))
    var_chi_theta_mean = savefile.createVariable("chi_theta_mean", "d", ("id", "z"))
    var_chi_theta_mean_env = savefile.createVariable("chi_theta_mean_env", "d", ("id", "z"))
    var_chi_theta_mean_core = savefile.createVariable("chi_theta_mean_core", "d", ("id", "z"))
    var_chi_ql = savefile.createVariable("chi_ql", "d", ("id", "z"))
    var_chi_ql_mean = savefile.createVariable("chi_ql_mean", "d", ("id", "z"))
    var_chi_ql_mean_env = savefile.createVariable("chi_ql_mean_env", "d", ("id", "z"))
    var_chi_ql_mean_core = savefile.createVariable("chi_ql_mean_core", "d", ("id", "z"))

    var_t[:] = t
    var_z[:] = z
    var_chi_theta[:] = chi_theta
    var_chi_theta_mean[:] = chi_theta_mean * (~mask)
    var_chi_theta_mean_core[:] = chi_theta_mean_core * (~mask)
    var_chi_theta_mean_env[:] = chi_theta_mean_env * (~mask)
    var_chi_ql[:] = chi_ql
    var_chi_ql_mean[:] = chi_ql_mean * (~mask)
    var_chi_ql_mean_core[:] = chi_ql_mean_core * (~mask)
    var_chi_ql_mean_env[:] = chi_ql_mean_env * (~mask)

    corefile.close()
    envfile.close()
    shellfile.close()
    statfile.close()

    savefile.close()
Example #7
0
def main(filename):
    """Build per-cloud profiles (core, condensed, shells, edges, env, plume)
    for the timestep encoded in *filename* and save one NetCDF per profile.

    filename: model output file whose name encodes the timestep (decoded
        by mc.time_picker).
    """
    # Map each output variable name to the routine that computes it.
    vars = {
          'AREA': var_calcs.area,
          'TABS': var_calcs.tabs,
          'QN': var_calcs.qn,
          'QV': var_calcs.qv,
          'QT': var_calcs.qt,
          'U': var_calcs.u,
          'V': var_calcs.v,
          'W': var_calcs.w,
          'THETAV': var_calcs.thetav,
          'THETAV_LAPSE': var_calcs.thetav_lapse,
          'THETAL': var_calcs.thetal,
          'MSE': var_calcs.mse,
          'RHO': var_calcs.rho,
          'PRES': var_calcs.press,
          'WQREYN': var_calcs.wqreyn,
          'WWREYN': var_calcs.wwreyn,
          'DWDZ': var_calcs.dw_dz,
          'DPDZ': var_calcs.dp_dz,
          'TR01': var_calcs.tr01,
    }
    
    # Automatically load time step from output file name
    time = mc.time_picker(filename)
    
    # Load CDF Files
    nc_file = Dataset(filename)
    stat_file = Dataset(mc.get_stat())

    # Coordinates from the model output; density profile from the stat file
    # at this timestep.
    data = {'z': nc_file.variables['z'][:].astype(double),
            'p': nc_file.variables['p'][:].astype(double),
            'RHO' : stat_file.variables['RHO'][time,:].astype(double),
            }
    stat_file.close()
    
    # For each cloud, iterate over all times
    cloud_filename = '../cloudtracker/pkl/cloud_data_%08d.pkl' % time
   
    # Load the cloud data at that timestep
    clouds = cPickle.load(open(cloud_filename, 'rb'))
       
    # Process clouds in ascending id order (Python 2: keys() is a list).
    ids = clouds.keys()
    ids.sort()

    data['ids'] = numpy.array(ids)
    # Pull the raw 3-d model fields for this single output time.
    for name in ('QV', 'QN', 'TABS', 'PP', 'U', 'V', 'W', 'TR01'):
        data[name] = nc_file.variables[name][0, :].astype(numpy.double)
                
    # For each cloud, create a savefile for each profile
    savefiles = {}
    profiles = {}
    for item in ('core', 'condensed', 'condensed_shell', 
                 'condensed_edge', 'condensed_env',
                 'core_shell', 
                 'core_edge', 'core_env', 
                 'plume'):            

        savefile, variables = create_savefile(time, data, vars, item)
        savefiles[item] = savefile
        profiles[item] = variables
        
    for n, id in enumerate(ids):
        print "time: ", time, " id: ", id
        # Select the current cloud id
        cloud = clouds[id]

        make_profiles(profiles, cloud, vars, data, n)
        
    for savefile in savefiles.values():
        savefile.close()

    nc_file.close()