Code example #1
0
File: TIL.py  Project: timhoar/DARTpy
def Nsq_forcing_from_Q(E, datetime_in=None, debug=False, hostname='taurus'):
    """
    Compute the diabatic-heating contribution to the static-stability (N^2)
    tendency, following the TEM form of the thermodynamic equation used by
    Birner (2010).

    The term computed here is  g * d(Q/theta)/dz.

    INPUTS:
    E: a DART experiment dictionary. Relevant fields are:
        E['exp_name'] - the experiment name
        E['daterange'] - helps to choose which date to load in case this isn't
            specifically given
        E['variable'] - determines which diabatic heating is used; it should be
            a string of the form 'Nsq_forcing_XXXXX', where XXXXX is the model
            variable holding the desired heating rate. For example, in WACCM
            'QRL_TOT' is the total longwave heating, so to get the N2 forcing
            from that, set E['variable']='Nsq_forcing_QRL_TOT'.
    datetime_in: the date for which we want to compute this diagnostic.
        Default is None -- in this case the loaders fall back on the first
        date in E['daterange'].
    debug: if True, pass verbose/debug flags through to the loader routines.
    hostname: machine name forwarded to the DART diagnostic-file loader.

    OUTPUTS:
    D: dict with fields
        'data'      - N^2 forcing term in s^{-2}/day
        'lev','lat' - coordinate arrays copied from the heating diagnostic
        'units','long_name' - metadata strings

    Raises:
    ValueError: if the zonally averaged fields do not come out 2-D or 3-D.
    """

    # necessary constants
    H = 7000.0  # scale height in m
    p0 = 1000.0  # reference pressure in hPa
    g = 9.8  # acceleration of gravity in m/s^2

    # Recover the model variable name from E['variable'].
    # NOTE: str.strip('Nsq_forcing_') (the old code) removes any leading and
    # trailing characters found in that *set* -- not the prefix -- and can eat
    # characters of the variable name itself, so remove the prefix explicitly.
    prefix = 'Nsq_forcing_'
    varname = E['variable']
    Qstring = varname[len(prefix):] if varname.startswith(prefix) else varname

    # load the desired diabatic heating term
    # this is not typically part of the DART output, so load from model history files
    # (right now this really only works for WACCM/CAM)
    EQ = E.copy()
    EQ['variable'] = Qstring
    DQ = DSS.compute_DART_diagn_from_model_h_files(EQ,
                                                   datetime_in,
                                                   verbose=debug)
    # remove the time dimension, which should have length 1
    DQ['data'] = np.squeeze(DQ['data'])

    # also load potential temperature
    ET = E.copy()
    ET['variable'] = 'theta'
    Dtheta = dart.load_DART_diagnostic_file(ET,
                                            datetime_in,
                                            hostname=hostname,
                                            debug=debug)
    # squeeze out extra dims, which we get if we load single copies (e.g. ensemble mean)
    Dtheta['data'] = np.squeeze(Dtheta['data'])

    # now find the longitude dimension and average over it
    # for both Q and theta
    Q_mean = DSS.average_over_named_dimension(DQ['data'], DQ['lon'])
    theta_mean = DSS.average_over_named_dimension(Dtheta['data'],
                                                  Dtheta['lon'])

    # Q_mean comes out as (copy x lev x lat) whereas theta_mean is
    # (copy x lat x lev), so swap the last two axes of Q_mean to match.
    # (np.transpose replaces the old hand-written element-copy loops.)
    if theta_mean.ndim == 3:
        Q_mean2 = np.transpose(Q_mean, (0, 2, 1))
    elif theta_mean.ndim == 2:
        Q_mean2 = np.transpose(Q_mean)
    else:
        raise ValueError(
            'Nsq_forcing_from_Q: expected 2-D or 3-D zonal-mean fields, '
            'got ndim=%d' % theta_mean.ndim)

    # divide Q by theta
    X = Q_mean2 / theta_mean

    # convert pressure levels to approximate log-pressure altitude
    lev = DQ['lev']
    zlev = H * np.log(p0 / lev)  # height in m

    # vertical gradient with respect to height.
    # X now has shape (copy x lat x lev) or (lat x lev), so the level axis is
    # always the last one. Passing the 1-D coordinate array zlev to
    # np.gradient handles the nonuniform level spacing directly (the old code
    # passed an N-D spacing array, which modern numpy rejects).
    levdim = X.ndim - 1
    dxdz = np.gradient(X, zlev, axis=levdim)

    # the above calculation yields a quantity in units s^-2/s, but it makes more sense
    # in the grand scheme of things to look at buoyancy forcing per day, so here
    # is a conversion factor.
    seconds_per_day = 60. * 60. * 24.0
    N2_forcing = g * dxdz * seconds_per_day

    D = dict()
    D['data'] = N2_forcing
    D['lev'] = DQ['lev']
    D['lat'] = DQ['lat']
    D['units'] = 's^{-2}/day'
    D['long_name'] = 'N^{2} Forcing'

    return D