Code Example #1
File: MJO.py Project: timhoar/DARTpy
def plot_variance_maps(E,cbar=True,hostname="taurus"):

	# given a certain experiment or dataset over a certain daterange, 
	# plot the MJO-related variance on a map

	# load the variance map  
	VV,lat,lon = variance_maps(E,hostname=hostname)  

	# set up the map projection
	map = Basemap(projection='mill',llcrnrlat=-90,urcrnrlat=90,
		    llcrnrlon=0,urcrnrlon=360,resolution='c')

	# draw coastlines, country boundaries, fill continents.
	map.drawcoastlines(linewidth=0.25)
	map.drawcountries(linewidth=0.25)

	# draw lat/lon grid lines every 30 degrees.
	map.drawmeridians(np.arange(0,360,30),linewidth=0.25)
	map.drawparallels(np.arange(-90,90,30),linewidth=0.25)

	# compute native map projection coordinates of lat/lon grid.
	X,Y = np.meshgrid(lon,lat)
	x, y = map(X, Y)

	# choose color map based on the variable in question
	E['extras'] = 'MJO variance'
	colors,cmap,cmap_type = DSS.state_space_HCL_colormap(E)

	# contour data over the map.
	cs = map.contourf(x,y,VV,15,cmap=cmap)

	# add a colorbar if requested
	if cbar:
		CB = plt.colorbar(cs, shrink=0.6, extend='both')
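
These excerpts rely on module-level imports that MJO.py defines elsewhere. A minimal sketch of what they would look like, followed by a hypothetical call; the import names for the helper modules (DART, DART_state_space, experiment_settings) and the experiment-dictionary keys are assumptions for illustration, not confirmed by this excerpt:

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
import DART as dart                  # assumed: DARTpy core module
import DART_state_space as DSS       # assumed: source of the DSS.* helpers
import experiment_settings as es     # assumed: user module with std_runs etc.

# hypothetical usage sketch:
# E = {'exp_name': '...', 'variable': 'OLR', 'daterange': [...],
#      'extras': None}               # a standard DARTpy experiment dictionary
# plot_variance_maps(E, cbar=True, hostname='taurus')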
Code Example #2
File: MJO.py Project: timhoar/DARTpy
def plot_correlations_lag_lat_or_lon(E,climatology_option='NODA',maxlag=25,lag_versus_what='lon',filter_order=50,cbar='vertical',hostname="taurus",debug=False):

	"""
	Given a certain experiment or dataset over a certain daterange, 
	plot the correlation between wind or precip anomalies in one reference
	region, relative to everywhere else, either 
	as a function of latitude or longitude, and lag.  
	This should produce figures like Figs. 5-6 of Waliser et al.  

	INPUTS:  
	E: a standard DART experiment dictionary, with the variable field and level range corresponding to some MJO variable  
	maxlag: the limit of the lag (in days) that we look at  
	lag_versus_what: choose 'lat' or 'lon'  
	filter_order: order of the time filter applied before the correlations are computed  
	cbar: colorbar orientation ('horizontal' or 'vertical'), or None for no colorbar  
	hostname: computer name - default is Taurus  
	climatology_option: choose which climatology to take the anomalies with respect to -- default is "NODA"  
	"""

	# load the correlation field 
	R,S,L,x = correlations_lag_lat_or_lon(E,maxlag,lag_versus_what,filter_order,climatology_option,hostname=hostname,verbose=debug)

	# choose color map based on the variable in question
	E['extras'] = 'Correlation'
	colors,cmap,cmap_type = DSS.state_space_HCL_colormap(E,reverse=True)
	
	# choose axis labels  
	plt.ylabel('Lag (days)')
	if lag_versus_what=='lat':
		plt.xlabel('Latitude')
	if lag_versus_what=='lon':
		plt.xlabel('Longitude')

	# set the contour levels - it depends on the color limits and the number of colors we have  
	clim = 1.0
	if cmap_type == 'divergent':
		clevels  = np.linspace(start=-clim,stop=clim,num=11)
	else:
		clevels  = np.linspace(start=0,stop=clim,num=11)

	# contour plot of the chosen variable
	cs = plt.contourf(x,L,R,levels=clevels,cmap=cmap)
	plt.clim([-1.0,1.0])

	if (cbar is not None):
		CB = plt.colorbar(cs, shrink=0.6, extend='both', orientation=cbar)

	return x,L,R,S
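
What gets plotted here - the correlation of a reference anomaly time series against every other point at a range of lags - can be sketched in a few lines of NumPy. This illustrates the idea only; it is not the repo's correlations_lag_lat_or_lon, whose filtering and regional averaging are not shown in this excerpt:

import numpy as np

def lag_correlation(ref, field, maxlag):
	"""
	Correlate a reference time series ref (shape nT) with every column of
	field (shape nT x nX) at lags -maxlag..+maxlag. Positive lag means the
	field lags the reference.
	"""
	nT, nX = field.shape
	lags = np.arange(-maxlag, maxlag + 1)
	R = np.zeros((len(lags), nX))
	ref = (ref - ref.mean()) / ref.std()
	F = (field - field.mean(axis=0)) / field.std(axis=0)
	for i, lag in enumerate(lags):
		if lag >= 0:
			a, b = ref[:nT - lag], F[lag:, :]
		else:
			a, b = ref[-lag:], F[:nT + lag, :]
		R[i, :] = a @ b / len(a)
	return lags, R

# a field that is the reference shifted by 5 days should peak at lag +5
t = np.arange(200)
ref = np.sin(2 * np.pi * t / 45.0)
field = np.roll(ref, 5)[:, None] + 0.1 * np.random.randn(200, 1)
lags, R = lag_correlation(ref, field, maxlag=25)
print(lags[np.argmax(R[:, 0])])		# 5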
Code Example #3
File: MJO.py Project: timhoar/DARTpy
def ano(E,climatology_option = 'NODA',hostname='taurus',verbose=False):

	"""
	Compute anomaly fields relative to some climatology

	Inputs allowed for climatology_option:  
	'NODA': take the ensemble mean of the corresponding no-DA experiment as a 40-year climatology  
	'F_W4_L66': daily climatology of a CESM+WACCM simulation with realistic forcings, 1951-2010
	None: don't subtract out anything -- just return the regular fields in the same shape as other "anomalies"  
	"""

	# load climatology 
	Xclim,lat,lon,lev,DR = load_climatology(E,climatology_option,hostname)

	# change the daterange in the anomalies to suit what was found for climatology  
	if len(DR) != len(E['daterange']):
		print('Changing the experiment daterange to the dates found for the requested climatology')
		E['daterange'] = DR
		d1 = DR[0].strftime("%Y-%m-%d")
		d2 = DR[-1].strftime("%Y-%m-%d")
		print('new daterange goes from '+d1+' to '+d2)

	# some climatologies are only available at daily resolution, so 
	# in that case we have to change the daterange in E to be daily  
	if climatology_option == 'F_W4_L66':
		d0 = E['daterange'][0]
		df = E['daterange'][-1]
		days = df-d0
		DRnew = dart.daterange(date_start=d0, periods=days.days+1, DT='1D')
		E['daterange'] = DRnew

	# load the desired model fields for the experiment
	Xlist = []	# empty list to hold the fields we retrieve for every day  
	for date in E['daterange']:
		X,lat0,lon0,lev0 = DSS.compute_DART_diagn_from_model_h_files(E,date,hostname=hostname,verbose=verbose)
		if X is not None:
			Xs = np.squeeze(X)
			Xlist.append(Xs)
			lat = lat0
			lon = lon0
			lev = lev0

	# check that the right vertical levels were loaded
	if verbose:
		print('------computing daily anomalies for the following vertical levels and variable:-------')
		print(lev)
		print(E['variable'])

	# compute anomalies:
	# for this we turn the model fields into a matrix and subtract the climatology
	XX = np.concatenate([X[..., np.newaxis] for X in Xlist], axis=len(Xs.shape))
	if climatology_option is None:
		AA = XX
		XclimR = None
	else:
		# if the climatology does not have shape lat x lon x lev x time, 
		# run swapaxes 2x to get it as such  
		# NOTE: this is still a kludge and probably wont work with all datasets - check this carefully 
		# with your own data 
		XclimS = np.squeeze(Xclim)
		# use the (possibly adjusted) experiment daterange as the time length; 
		# DRnew only exists for daily climatologies  
		nT = len(E['daterange'])
		lastdim = len(XclimS.shape)-1
		for ii,s in enumerate(XclimS.shape):
			if s == nT:
				time_dim = ii

		# if only retrieving a single date, we don't need to do any reshaping,
		# but might need to squeeze out a length-one time dimension
		if nT == 1:
			XclimR = XclimS
			XX = np.squeeze(XX)
		else:
			# if time is the last dimension, don't need to reshape Xclim 
			if time_dim == lastdim: 
				XclimR = XclimS
			# if time is the first dimension, need to reshape Xclim
			if time_dim == 0:	
				Xclim2 = XclimS.swapaxes(0,lastdim)
				XclimR = Xclim2.swapaxes(0,1)


		AA = XX-XclimR

	return AA,XclimR,lat,lon,lev,DR
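
A note on the concatenation idiom near the end of ano(): appending a trailing axis to each daily field and concatenating along it is exactly what np.stack does. A tiny self-contained check of the equivalence:

import numpy as np

Xlist = [np.random.randn(4, 5) for _ in range(3)]	# e.g. 3 daily lat x lon fields

XX_concat = np.concatenate([X[..., np.newaxis] for X in Xlist], axis=2)
XX_stack = np.stack(Xlist, axis=-1)

print(XX_concat.shape)				# (4, 5, 3)
print(np.array_equal(XX_concat, XX_stack))	# True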
Code Example #4
File: MJO.py Project: timhoar/DARTpy
def load_std(E,std_mode = 'NODA',hostname='taurus',verbose=False):

	"""
	This subroutine returns the standard deviation of whatever variable is given in E['variable'], 
	for each time given in E['daterange'].
	There are several ways to compute the standard deviation, and that's determined by the input 
	'std_mode':
		std_mode='ensemble' simply computes the standard deviation of the DART ensemble  
			at each time 
		if you set std_mode to any other string, it looks up the multi-year experiment corresponding 
			to that string using the subroutine 'std_runs' in the user 
			module experiment_settings. 
			In this case, the standard deviation  
			is computed for each time over several years, rather than an ensemble 

	"""
	if std_mode == 'ensemble':
		# cycle over the dates in the experiment dictionary 
		# and load the ensemble standard deviation at each time  
		ECLIM = E.copy()
		ECLIM['copystring'] = 'ensemble std'
		Xclim,lat,lon,lev,DRnew = DSS.DART_diagn_to_array(ECLIM,hostname=hostname,debug=verbose)
		if len(DRnew) != len(ECLIM['daterange']):
			print('NOTE: not all requested data were found; returning a revised daterange')
		if Xclim is None:
			print('Cannot find data for experiment '+E['exp_name'])
			return None, None, None, None, None

	else:
		# any other string: look up the corresponding multi-year dataset via 
		# the subroutine std_runs in the user module experiment_settings  
		ff = es.std_runs(std_mode,hostname=hostname,debug=verbose)

		# load the desired variables 
		from netCDF4 import Dataset
		f = Dataset(ff,'r')
		lat = f.variables['lat'][:]
		lon = f.variables['lon'][:]
		lev = f.variables['lev'][:]
		time = f.variables['time'][:]

		variable = E['variable']
		if E['variable'] == 'US':
			variable = 'U'
		if E['variable'] == 'VS':
			variable = 'V'
		if E['variable'] == 'OLR':
			variable = 'FLUT'
		VV = f.variables[variable][:]
		f.close()

		# choose the times corresponding to the daterange in E
		d0 = E['daterange'][0].timetuple().tm_yday	# day of the year where we start  
		nT = len(E['daterange'])
		df = E['daterange'][nT-1].timetuple().tm_yday	# day of the year where we end  

		# if df<d0, we have to cycle back to the beginning of the year
		if df < d0:
			day_indices = list(range(d0-1,365))+list(range(0,df))
		else:
			day_indices = list(range(d0-1,df))

		# also choose the lat, lon, and level ranges corresponding to those in E
		if E['levrange'] is not None:
			if E['levrange'][0] == E['levrange'][1]:
				ll = E['levrange'][0]
				idx = (np.abs(lev-ll)).argmin()
				lev2 = lev[idx]
				k1 = idx
				k2 = idx
			else:
				highest_level_index = (np.abs(lev-E['levrange'][1])).argmin()
				lowest_level_index = (np.abs(lev-E['levrange'][0])).argmin()
				# which index is k1 or k2 depends on the direction of lev 
				if highest_level_index > lowest_level_index:
					k2 = highest_level_index
					k1 = lowest_level_index
				if highest_level_index < lowest_level_index:
					k1 = highest_level_index
					k2 = lowest_level_index
				lev2 = lev[k1:k2+1]

		j2 = (np.abs(lat-E['latrange'][1])).argmin()
		j1 = (np.abs(lat-E['latrange'][0])).argmin()
		lat2 = lat[j1:j2+1]
		i2 = (np.abs(lon-E['lonrange'][1])).argmin()
		i1 = (np.abs(lon-E['lonrange'][0])).argmin()
		lon2 = lon[i1:i2+1]

		if len(VV.shape) == 4:
			Xclim = VV[day_indices,k1:k2+1,j1:j2+1,i1:i2+1]
		else:
			Xclim = VV[day_indices,j1:j2+1,i1:i2+1]

		# in this case, we don't need to change the daterange  
		DRnew = E['daterange']

	return Xclim,lat,lon,lev,DRnew
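
The day-of-year bookkeeping above, which wraps past December 31 when the daterange crosses New Year, is easy to get wrong. Here is the same logic isolated into a small sketch, assuming a 365-day (leap-free) daily climatology as in the code above:

from datetime import datetime

def doy_indices(d_start, d_end):
	"""
	0-based indices into a 365-day daily climatology covering the span from
	d_start to d_end, wrapping past Dec 31 if needed -- the same logic used
	in load_std above.
	"""
	d0 = d_start.timetuple().tm_yday	# 1-based day of year
	df = d_end.timetuple().tm_yday
	if df < d0:	# the daterange crosses New Year
		return list(range(d0 - 1, 365)) + list(range(0, df))
	return list(range(d0 - 1, df))

print(doy_indices(datetime(2009, 12, 28), datetime(2010, 1, 3)))
# [361, 362, 363, 364, 0, 1, 2]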
Code Example #5
File: MJO.py Project: timhoar/DARTpy
def load_climatology(E,climatology_option = 'NODA',hostname='taurus',verbose=False):

	"""
	Load a climatology option for a given DART experiment. 
	The choice of climatology is given by 'climatology_option'. Choices are:  
	'NODA' (default): take the ensemble mean of the corresponding no-DA experiment as an N-year climatology  
	'F_W4_L66': CESM-WACCM simulation with observed forcings, 1951-2010 (performed by Wuke Wang)  
	"""
	climatology_option_not_found = True

	if climatology_option == 'NODA' :
		climatology_option_not_found = False
		# cycle over the dates in the experiment dictionary 
		# and load the ensemble mean of the corresponding No-assimilation case 
		# TODO: a subroutine that returns the corresponding NODA experiment for each case  
		ECLIM = E.copy()
		ECLIM['exp_name'] = 'W0910_NODA'
		ECLIM['diagn'] = 'Prior'
		ECLIM['copystring'] = 'ensemble mean'
		Xclim,lat,lon,lev,DRnew = DSS.DART_diagn_to_array(ECLIM,hostname=hostname,debug=verbose)
		if len(DRnew) != len(ECLIM['daterange']):
			print('NOTE: not all requested data were found; returning a revised daterange')
		if Xclim is None:
			print('Cannot find data for climatology option '+climatology_option+' and experiment '+E['exp_name'])
			return None, None, None, None, None

	if climatology_option == 'F_W4_L66' :
		from netCDF4 import Dataset
		climatology_option_not_found = False
		# in this case, load a single daily climatology calculated from this CESM-WACCM simulation  
		ff = '/data/c1/lneef/CESM/F_W4_L66/atm/climatology/F_W4_L66.cam.h1.1951-2010.daily_climatology.nc'
		f = Dataset(ff,'r')
		lat = f.variables['lat'][:]
		lon = f.variables['lon'][:]
		lev = f.variables['lev'][:]
		time = f.variables['time'][:]

		# load climatology of the desired model variable  
		variable = E['variable']
		if E['variable'] == 'US':
			variable = 'U'
		if E['variable'] == 'VS':
			variable = 'V'
		if E['variable'] == 'OLR':
			variable = 'FLUT'
		VV = f.variables[variable][:]
		f.close()

		# choose the times corresponding to the daterange in E
		d0 = E['daterange'][0].timetuple().tm_yday	# day of the year where we start  
		nT = len(E['daterange'])
		df = E['daterange'][nT-1].timetuple().tm_yday	# day of the year where we end  

		# if df<d0, we have to cycle back to the beginning of the year
		if df < d0:
			day_indices = list(range(d0-1,365))+list(range(0,df))
		else:
			day_indices = list(range(d0-1,df))

		# also choose the lat, lon, and level ranges corresponding to those in E
		if E['levrange'] is not None:
			if E['levrange'][0] == E['levrange'][1]:
				ll = E['levrange'][0]
				idx = (np.abs(lev-ll)).argmin()
				lev2 = lev[idx]
				k1 = idx
				k2 = idx
			else:
				highest_level_index = (np.abs(lev-E['levrange'][1])).argmin()
				lowest_level_index = (np.abs(lev-E['levrange'][0])).argmin()
				# which index is k1 or k2 depends on the direction of lev 
				if highest_level_index > lowest_level_index:
					k2 = highest_level_index
					k1 = lowest_level_index
				if highest_level_index < lowest_level_index:
					k1 = highest_level_index
					k2 = lowest_level_index
				lev2 = lev[k1:k2+1]

		j2 = (np.abs(lat-E['latrange'][1])).argmin()
		j1 = (np.abs(lat-E['latrange'][0])).argmin()
		lat2 = lat[j1:j2+1]
		i2 = (np.abs(lon-E['lonrange'][1])).argmin()
		i1 = (np.abs(lon-E['lonrange'][0])).argmin()
		lon2 = lon[i1:i2+1]

		if len(VV.shape) == 4:
			Xclim = VV[day_indices,k1:k2+1,j1:j2+1,i1:i2+1]
		else:
			Xclim = VV[day_indices,j1:j2+1,i1:i2+1]

		# in this case, we don't need to change the daterange  
		DRnew = E['daterange']

	if climatology_option_not_found:
		print('Climatology option '+climatology_option+' has not been coded yet. Returning None for climatology.')
		return None, None, None, None, None

	return Xclim,lat,lon,lev,DRnew
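
Both load_std and load_climatology subset the coordinate arrays by nearest-neighbor matching with argmin. A minimal sketch of that pattern; taking min/max of the two indices mirrors the highest/lowest-level-index handling above, since a coordinate (e.g. pressure) may be stored in either direction:

import numpy as np

def nearest_index_range(coord, lo, hi):
	"""
	Nearest-neighbor index bounds into a 1-D coordinate array, such that
	coord[i1:i2+1] approximately spans [lo, hi].
	"""
	i1 = (np.abs(coord - lo)).argmin()
	i2 = (np.abs(coord - hi)).argmin()
	return min(i1, i2), max(i1, i2)

lat = np.linspace(-90, 90, 96)
j1, j2 = nearest_index_range(lat, -20, 20)
print(lat[j1], lat[j2])		# roughly -20 and 20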
Code Example #6
File: TIL.py Project: LisaNeef/DART-state-space
def Nsq_forcing_from_Q(E,datetime_in=None,debug=False,hostname='taurus'):

	"""
	Birner (2010) used the thermodynamic equation in the TEM form to derive an expression 
	for the rate of change of static stability (N2) due to residual motion and diabatic heating. 

	This subroutine computes the term due to diabatic heating, i.e.: 
	g d(Q/theta)/dz

	INPUTS:
	E: a DART experiment dictionary. Relevant fields are:
		E['exp_name'] - the experiment name
		E['daterange'] - helps to choose which date to load in case this isn't specifically given
		E['variable'] - this determines what kind of diabatic heating we use:
			the value of E['variable'] should be a string like 'Nsq_forcing_XXXXX'
			where XXXXX is the model variable corresponding to whatever diabatic 
			heating type we are looking for. 
			For example, in WACCM, 'QRL_TOT' is the total longwave heating, so to get the 
			N2 forcing from that, just set E['variable']='Nsq_forcing_QRL_TOT'
	datetime_in: the date for which we want to compute this diagnostic. 
		default is None -- in this case, just choose the first date in E['daterange']


	OUTPUTS:
	N2_forcing: N-squared forcing term in s^{-2}/day
	lat 
	lev
	"""

	# necessary constants  
	H=7000.0	# scale height in m  
	p0=1000.0	# reference pressure in hPa  
	g=9.8		# acceleration of gravity 

	if datetime_in is None:
		datetime_in = E['daterange'][0]

	# load the desired diabatic heating term
	# this is not typically part of the DART output, so load from model history files
	# (right now this really only works for WACCM/CAM)  
	# note: str.strip removes a *set of characters* from both ends rather than 
	# a prefix, so slice the 'Nsq_forcing_' prefix off explicitly  
	Qstring = E['variable'][len('Nsq_forcing_'):]
	EQ = E.copy()
	EQ['variable']=Qstring
	Q2,lat,lon,lev = DSS.compute_DART_diagn_from_model_h_files(EQ,datetime_in,verbose=debug)
	# remove the time dimension, which should have length 1 
	Q = np.squeeze(Q2)

	# also load potential temperature 
	ET = E.copy()
	ET['variable']='theta'
	lev,lat,lon,theta2,P0,hybm,hyam = dart.load_DART_diagnostic_file(ET,datetime_in,hostname=hostname,debug=debug)
	# squeeze out extra dims, which we get if we load single copies (e.g. ensemble mean)
	theta = np.squeeze(theta2)

	# now find the longitude dimension and average over it  
	# for both Q and theta  
	nlon=len(lon)
	Mean_arrays = []
	for A in [Q,theta]:
		for idim,s in enumerate(A.shape):
			if s == nlon:
				londim=idim
		Mean_arrays.append(np.average(A,axis=londim))
	Q_mean=Mean_arrays[0]
	theta_mean=Mean_arrays[1]

	# Q_mean should come out as copy x lev x lat, whereas theta_mean is copy x lat x lev,  
	# so manually transpose Q_mean to match  
	Q_mean2 = np.zeros(shape=theta_mean.shape)
	if Q_mean2.ndim==3:
		for icopy in range(theta_mean.shape[0]):
			for ilat in range(theta_mean.shape[1]):
				for ilev in range(theta_mean.shape[2]):
					Q_mean2[icopy,ilat,ilev]=Q_mean[icopy,ilev,ilat]
	else:
		for ilat in range(theta_mean.shape[0]):
			for ilev in range(theta_mean.shape[1]):
				Q_mean2[ilat,ilev]=Q_mean[ilev,ilat]
		
	# divide Q by theta
	X = Q_mean2/theta_mean

	# convert pressure levels to approximate log-pressure altitude  
	zlev = H*np.log(p0/lev)

	# X *should* have shape (copy x lat x lev) OR (lat x lev) - find the level axis  
	if X.ndim==3:
		levdim=2
	if X.ndim==2:
		levdim=1
	# np.gradient (NumPy >= 1.13) accepts a 1-D coordinate array plus an axis 
	# argument, which handles the unevenly spaced levels directly  
	dxdz = np.gradient(X,zlev,axis=levdim)	# vertical gradient with respect to height

	# the above calculation yields a quantity in units s^-2/s, but it makes more sense 
	# in the grand scheme of things to look at buoyancy forcing per day, so here 
	# is a conversion factor.
	seconds_per_day = 60.*60.*24.0

	# compute the N2 forcing (this broadcasts over ensemble members if present)
	N2_forcing = g*dxdz*seconds_per_day

	return N2_forcing,lat,lev
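
The vertical-gradient call above uses the coordinate-array form of np.gradient (available since NumPy 1.13), which handles the unevenly spaced log-pressure heights without hand-broadcasting a spacing array. A quick self-contained check on a field that is linear in z, so the exact slope should be recovered:

import numpy as np

H, p0 = 7000.0, 1000.0
lev = np.array([1000., 850., 700., 500., 300., 200., 100.])	# pressure in hPa
zlev = H * np.log(p0 / lev)					# approximate altitude in m

X = np.outer(np.ones(4), 0.01 * zlev)	# a (lat x lev) field, linear in z
dXdz = np.gradient(X, zlev, axis=1)	# derivative along the lev axis
print(np.allclose(dXdz, 0.01))		# True: the slope is recovered exactly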
Code Example #7
File: TIL.py Project: LisaNeef/DART-state-space
def Nsq_forcing_from_RC(E,datetime_in=None,debug=False,hostname='taurus'):

	"""
	Birner (2010) used the thermodynamic equation in the TEM form to derive an expression 
	for the rate of change of static stability (N2) due to residual motion and diabatic heating. 

	This subroutine computes those terms from the dynamical heating rates computed by Wuke Wang. 
	The vertical motion (wstar) term is -d(wstar*Nsq)/dz.  
	Wuke already computed WS = -wstar*H*Nsq/R, so it's easiest to load that data, divide out H and R, and then take the vertical gradient. 

	The horizontal term is -g d(vstar/theta * d(theta)/dy)/dz. 
	Wuke already computed the heating rate term v*dtheta/dy = v*dTdy, 
	so the easiest thing to do is to multiply the heating rates by g/theta
	and then take the vertical gradient. 

	INPUTS:
	E: a DART experiment dictionary. Relevant fields are:
		E['exp_name'] - the experiment name
		E['daterange'] - helps to choose which date to load in case this isn't specifically given
		E['variable'] - if this is set to 'Nsq_vstar_forcing', the code returns the N2 forcing due to 
			meridional residual circulation. For anything else, it returns the forcing 
			due to vertical residual circulation. 
	datetime_in: the date for which we want to compute this diagnostic. 
		default is None -- in this case, just choose the first date in E['daterange']


	OUTPUTS:
	N2_forcing: N-squared forcing term in s^{-2}/day
	lat 
	lev
	"""

	# necessary constants  
	H=7000.0	# scale height in m  
	g = 9.80
	p0=1000.0	# reference pressure in hPa  

	if datetime_in is None:
		datetime_in = E['daterange'][0]

	# depending on which term we want, need to load the residual circulation component and some other stuff, 
	# and then derive a quantity for which we take the vertical gradient 
	ERC = E.copy()
	ET=E.copy()
	if E['variable'] == 'Nsq_vstar_forcing':
		ET['variable']='theta'
		lev,lat,lon,theta,P0,hybm,hyam = dart.load_DART_diagnostic_file(ET,datetime_in,hostname=hostname,debug=debug)
		ERC['variable']='VSTAR'
		vstar,lat,lev = DSS.compute_DART_diagn_from_Wang_TEM_files(ERC,datetime_in,hostname=hostname,debug=debug)

		# the above routines do not return arrays of consistent shape, so have to do 
		# some acrobatics to get everything to match up. 

		# find which dimension of theta corresponds to longitude 
		nlon=len(lon)
		nlat=len(lat)
		for idim,s in enumerate(theta.shape):
			if s==nlon:
				londim=idim
			
		# take the zonal mean of potential temp  - this should make its shape copy x lat x lev
		thetam = np.average(theta,axis=londim)

		# next step is to find the meridional gradient of theta 
		# latitude steps --> convert to distance (arclength)
		rlat = np.deg2rad(lat)
		Re = 6371000.0		# radius of Earth in m 
		y = Re*rlat

		# squeeze out a possible length-1 copy dimension, then find which 
		# dimension of the squeezed zonal-mean theta corresponds to latitude - 
		# that's the axis we differentiate along 
		thetamS = np.squeeze(thetam)
		for idim,s in enumerate(thetamS.shape):
			if s==nlat:
				newlatdim=idim
		# np.gradient (NumPy >= 1.13) takes the 1-D arclength coordinate y 
		# together with the axis to differentiate along 
		dthetady = np.gradient(thetamS,y,axis=newlatdim)

		# the meridional gradient of zonal mean theta then gets multiplied by vstar and g/theta. But...  
		
		# the subroutine compute_DART_diagn_from_Wang_TEM_files delivers an array with 
		# dimensions lev x lat x copy (or just levxlat)
		# whereas N2 should come out as copy x lat x lev (or simply lat x lev)
		# need to transpose this, but I don't trust np.reshape - do it manually 
		vstar2 = np.zeros(shape=dthetady.shape)
		if vstar2.ndim==3:
			for icopy in range(dthetady.shape[0]):
				for ilat in range(dthetady.shape[1]):
					for ilev in range(dthetady.shape[2]):
						vstar2[icopy,ilat,ilev]=vstar[icopy,ilev,ilat]
		else:
			for ilat in range(dthetady.shape[0]):
				for ilev in range(dthetady.shape[1]):
					vstar2[ilat,ilev]=vstar[ilev,ilat]

		X = (g/thetamS)*vstar2*dthetady

	else:
		
		ET['variable']='Nsq'
		lev,lat,lon,Nsq,P0,hybm,hyam = dart.load_DART_diagnostic_file(ET,datetime_in,hostname=hostname,debug=debug)
		ERC['variable']='WSTAR'
		wstar,lat,lev = DSS.compute_DART_diagn_from_Wang_TEM_files(ERC,datetime_in,hostname=hostname,debug=debug)

		# find which dimension of Nsq corresponds to longitude 
		nlon=len(lon)
		for idim,s in enumerate(Nsq.shape):
			if s==nlon:
				londim=idim

		# take the zonal mean of buoyancy frequency 
		Nsqm = np.average(Nsq,axis=londim)
		
		# might have to squeeze out a length-1 copy dimension 
		Nsqm2 = np.squeeze(Nsqm)

		# the subroutine compute_DART_diagn_from_Wang_TEM_files delivers an array with dimensions lev x lat x copy (or just levxlat)
		# whereas N2 should come out as copy x lat x lev (or simply lat x lev)
		# need to transpose this, but I don't trust np.reshape - do it manually 
		wstar2 = np.zeros(shape=Nsqm2.shape)
		if wstar2.ndim==3:
			for icopy in range(Nsqm2.shape[0]):
				for ilat in range(Nsqm2.shape[1]):
					for ilev in range(Nsqm2.shape[2]):
						wstar2[icopy,ilat,ilev]=wstar[icopy,ilev,ilat]
		else:
			for ilat in range(Nsqm2.shape[0]):
				for ilev in range(Nsqm2.shape[1]):
					wstar2[ilat,ilev]=wstar[ilev,ilat]

		X = Nsqm2*wstar2

	# convert pressure levels to approximate log-pressure altitude  
	zlev = H*np.log(p0/lev)

	# X *should* have shape (copy x lat x lev) OR (lat x lev) - find the level axis  
	if X.ndim==3:
		levdim=2
	if X.ndim==2:
		levdim=1
	# np.gradient (NumPy >= 1.13) accepts a 1-D coordinate array plus an axis 
	# argument, which handles the unevenly spaced levels directly  
	dxdz = np.gradient(X,zlev,axis=levdim)	# vertical gradient with respect to height

	# the above calculation yields a quantity in units s^-2/s, but it makes more sense 
	# in the grand scheme of things to look at buoyancy forcing per day, so here 
	# is a conversion factor.
	seconds_per_day = 60.*60.*24.0

	N2_forcing = -dxdz*seconds_per_day

	return N2_forcing,lat,lev
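
The manual triple loops used here to reorder (copy x lev x lat) into (copy x lat x lev) do the same job as np.swapaxes; it is np.reshape that would scramble the data, since reshape reinterprets the buffer instead of permuting axes. A small check of the equivalence:

import numpy as np

vstar = np.random.randn(3, 7, 5)	# copy x lev x lat

manual = np.zeros((3, 5, 7))		# copy x lat x lev
for icopy in range(3):
	for ilat in range(5):
		for ilev in range(7):
			manual[icopy, ilat, ilev] = vstar[icopy, ilev, ilat]

print(np.array_equal(manual, vstar.swapaxes(1, 2)))	# True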
Code Example #8
File: TIL.py Project: timhoar/DARTpy
def Nsq_forcing_from_Q(E, datetime_in=None, debug=False, hostname='taurus'):
    """
	Birner (2010) used the thermodynamic equation in the TEM form to derive an expression 
	for the rate of change of static stability (N2) due to residual motion and diabatic heating. 

	This subroutine computes the term due to diabatic heating, i.e.: 
	g d(Q/theta)/dz

	INPUTS:
	E: a DART experiment dictionary. Relevant fields are:
		E['exp_name'] - the experiment name
		E['daterange'] - helps to choose which date to load in case this isn't specifically given
		E['variable'] - this determines what kind of diabatic heating we use:
			the value of E['variable'] should be a string like 'Nsq_forcing_XXXXX'
			where XXXXX is the model variable corresponding to whatever diabatic 
			heating type we are looking for. 
			For example, in WACCM, 'QRL_TOT' is the total longwave heating, so to get the 
			N2 forcing from that, just set E['variable']='Nsq_forcing_QRL_TOT'
	datetime_in: the date for which we want to compute this diagnostic. 
		default is None -- in this case, just choose the first date in E['daterange']


	OUTPUTS:
	D: a dictionary holding the N-squared forcing term in s^{-2}/day under 'data', 
		along with 'lat', 'lev', 'units', and 'long_name'
	"""

    # necessary constants
    H = 7000.0  # scale height in m
    p0 = 1000.0  # reference pressure in hPa
    g = 9.8  # acceleration of gravity

    if datetime_in is None:
        datetime_in = E['daterange'][0]

    # load the desired diabatic heating term
    # this is not typically part of the DART output, so load from model history files
    # (right now this really only works for WACCM/CAM)
    # note: str.strip removes a set of characters from both ends rather than
    # a prefix, so slice the 'Nsq_forcing_' prefix off explicitly
    Qstring = E['variable'][len('Nsq_forcing_'):]
    EQ = E.copy()
    EQ['variable'] = Qstring
    DQ = DSS.compute_DART_diagn_from_model_h_files(EQ,
                                                   datetime_in,
                                                   verbose=debug)
    # remove the time dimension, which should have length 1
    DQ['data'] = np.squeeze(DQ['data'])

    # also load potential temperature
    ET = E.copy()
    ET['variable'] = 'theta'
    Dtheta = dart.load_DART_diagnostic_file(ET,
                                            datetime_in,
                                            hostname=hostname,
                                            debug=debug)
    # squeeze out extra dims, which we get if we load single copies (e.g. ensemble mean)
    Dtheta['data'] = np.squeeze(Dtheta['data'])

    # now find the longitude dimension and average over it
    # for both Q and theta
    Q_mean = DSS.average_over_named_dimension(DQ['data'], DQ['lon'])
    theta_mean = DSS.average_over_named_dimension(Dtheta['data'],
                                                  Dtheta['lon'])

    # Q_mean should come out as copy x lev x lat, whereas theta_mean is copy x lat x lev,
    # so manually transpose Q_mean to match
    Q_mean2 = np.zeros(shape=theta_mean.shape)
    if Q_mean2.ndim == 3:
        for icopy in range(theta_mean.shape[0]):
            for ilat in range(theta_mean.shape[1]):
                for ilev in range(theta_mean.shape[2]):
                    Q_mean2[icopy, ilat, ilev] = Q_mean[icopy, ilev, ilat]
    else:
        for ilat in range(theta_mean.shape[0]):
            for ilev in range(theta_mean.shape[1]):
                Q_mean2[ilat, ilev] = Q_mean[ilev, ilat]

    # divide Q by theta
    X = Q_mean2 / theta_mean

    # convert pressure levels to approximate log-pressure altitude
    lev = DQ['lev']
    zlev = H * np.log(p0 / lev)

    # X *should* have shape (copy x lat x lev) OR (lat x lev) - find the level axis
    if X.ndim == 3:
        levdim = 2
    if X.ndim == 2:
        levdim = 1
    # np.gradient (NumPy >= 1.13) accepts a 1-D coordinate array plus an axis
    # argument, which handles the unevenly spaced levels directly
    dxdz = np.gradient(X, zlev, axis=levdim)  # vertical gradient with respect to height

    # the above calculation yields a quantity in units s^-2/s, but it makes more sense
    # in the grand scheme of things to look at buoyancy forcing per day, so here
    # is a conversion factor.
    seconds_per_day = 60. * 60. * 24.0

    # compute the N2 forcing (this broadcasts over ensemble members if present)
    N2_forcing = g * dxdz * seconds_per_day

    D = dict()
    D['data'] = N2_forcing
    D['lev'] = DQ['lev']
    D['lat'] = DQ['lat']
    D['units'] = 's^{-2}/day'
    D['long_name'] = 'N^{2} Forcing'

    return D
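
The Qstring fix applied in both versions of Nsq_forcing_from_Q deserves a note: str.strip takes a set of characters, not a prefix, so the original call only worked by luck for uppercase WACCM names. A demonstration ('Qcooling' is a made-up variable name for illustration):

# strip removes any of {N,s,q,_,f,o,r,c,i,n,g} from BOTH ends of the string
print('Nsq_forcing_QRL_TOT'.strip('Nsq_forcing_'))	# 'QRL_TOT' (works by luck)
print('Nsq_forcing_Qcooling'.strip('Nsq_forcing_'))	# 'Qcool' (silently mangled)
# slicing the prefix off is always safe:
print('Nsq_forcing_Qcooling'[len('Nsq_forcing_'):])	# 'Qcooling'
# (Python >= 3.9 also offers str.removeprefix('Nsq_forcing_').)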
Code Example #9
File: TIL.py Project: timhoar/DARTpy
def Nsq_forcing_from_RC(E, datetime_in=None, debug=False, hostname='taurus'):
    """
	Birner (2010) used the thermodynamic equation in the TEM form to derive an expression 
	for the rate of change of static stability (N2) due to residual motion and diabatic heating. 

	This subroutine computes those terms from the dynamical heating rates computed by Wuke Wang. 
	The vertical motion (wstar) term is -d(wstar*Nsq)/dz.  
	Wuke already computed WS = -wstar*H*Nsq/R, so it's easiest to load that data, divide out H and R, and then take the vertical gradient. 

	The horizontal term is -g d(vstar/theta * d(theta)/dy)/dz. 
	Wuke already computed the heating rate term v*dtheta/dy = v*dTdy, 
	so the easiest thing to do is to multiply the heating rates by g/theta
	and then take the vertical gradient. 

	INPUTS:
	E: a DART experiment dictionary. Relevant fields are:
		E['exp_name'] - the experiment name
		E['daterange'] - helps to choose which date to load in case this isn't specifically given
		E['variable'] - if this is set to 'Nsq_vstar_forcing', the code returns the N2 forcing due to 
			meridional residual circulation. For anything else, it returns the forcing 
			due to vertical residual circulation. 
	datetime_in: the date for which we want to compute this diagnostic. 
		default is None -- in this case, just choose the first date in E['daterange']


	OUTPUTS:
	D: a dictionary holding the N-squared forcing term in s^{-2}/day under 'data', 
		along with 'lat', 'lev', 'units', and 'long_name'
	"""

    # necessary constants
    H = 7000.0  # scale height in m
    g = 9.80
    p0 = 1000.0  # reference pressure in hPa

    if datetime_in is None:
        datetime_in = E['daterange'][0]

    # depending on which term we want, need to load the residual circulation component and some other stuff,
    # and then derive a quantity for which we take the vertical gradient
    ERC = E.copy()
    ET = E.copy()
    if E['variable'] == 'Nsq_vstar_forcing':
        ET['variable'] = 'theta'
        Dtheta = dart.load_DART_diagnostic_file(ET,
                                                datetime_in,
                                                hostname=hostname,
                                                debug=debug)
        theta = Dtheta['data']
        lat = Dtheta['lat']
        lon = Dtheta['lon']
        lev = Dtheta['lev']
        ERC['variable'] = 'VSTAR'
        Dvstar = DSS.compute_DART_diagn_from_Wang_TEM_files(ERC,
                                                            datetime_in,
                                                            hostname=hostname,
                                                            debug=debug)
        vstar = Dvstar['data']

        # the above routines do not return arrays of consistent shape, so have to do
        # some acrobatics to get everything to match up.

        # find which dimension of theta corresponds to longitude
        nlon = len(lon)
        nlat = len(lat)
        for idim, s in enumerate(theta.shape):
            if s == nlon:
                londim = idim

        # take the zonal mean of potential temp  - this should make its shape copy x lat x lev
        thetam = np.average(theta, axis=londim)

        # next step is to find the meridional gradient of theta
        # latitude steps --> convert to distance (arclength)
        rlat = np.deg2rad(lat)
        Re = 6371000.0  # radius of Earth in m
        y = Re * rlat

        # squeeze out a possible length-1 copy dimension, then find which
        # dimension of the squeezed zonal-mean theta corresponds to latitude -
        # that's the axis we differentiate along
        thetamS = np.squeeze(thetam)
        for idim, s in enumerate(thetamS.shape):
            if s == nlat:
                newlatdim = idim
        # np.gradient (NumPy >= 1.13) takes the 1-D arclength coordinate y
        # together with the axis to differentiate along
        dthetady = np.gradient(thetamS, y, axis=newlatdim)

        # the meridional gradient of zonal mean theta then gets multiplied by vstar and g/theta. But...

        # the subroutine compute_DART_diagn_from_Wang_TEM_files delivers an array with
        # dimensions lev x lat x copy (or just levxlat)
        # whereas N2 should come out as copy x lat x lev (or simply lat x lev)
        # need to transpose this, but I don't trust np.reshape - do it manually
        vstar2 = np.zeros(shape=dthetady.shape)
        if vstar2.ndim == 3:
            for icopy in range(dthetady.shape[0]):
                for ilat in range(dthetady.shape[1]):
                    for ilev in range(dthetady.shape[2]):
                        vstar2[icopy, ilat, ilev] = vstar[icopy, ilev, ilat]
        else:
            for ilat in range(dthetady.shape[0]):
                for ilev in range(dthetady.shape[1]):
                    vstar2[ilat, ilev] = vstar[ilev, ilat]

        X = (g / thetamS) * vstar2 * dthetady

    else:

        ET['variable'] = 'Nsq'
        D = dart.load_DART_diagnostic_file(ET,
                                           datetime_in,
                                           hostname=hostname,
                                           debug=debug)
        Nsq = D['data']
        lat = D['lat']
        lon = D['lon']
        lev = D['lev']

        ERC['variable'] = 'WSTAR'
        Dwstar = DSS.compute_DART_diagn_from_Wang_TEM_files(ERC,
                                                            datetime_in,
                                                            hostname=hostname,
                                                            debug=debug)
        wstar = Dwstar['data']

        # find which dimension of Nsq corresponds to longitude
        nlon = len(lon)
        for idim, s in enumerate(Nsq.shape):
            if s == nlon:
                londim = idim

        # take the zonal mean of buoyancy frequency
        Nsqm = np.average(Nsq, axis=londim)

        # might have to squeeze out a length-1 copy dimension
        Nsqm2 = np.squeeze(Nsqm)

        # the subroutine compute_DART_diagn_from_Wang_TEM_files delivers an array with dimensions lev x lat x copy (or just levxlat)
        # whereas N2 should come out as copy x lat x lev (or simply lat x lev)
        # need to transpose this, but I don't trust np.reshape - do it manually
        wstar2 = np.zeros(shape=Nsqm2.shape)
        if wstar2.ndim == 3:
            for icopy in range(Nsqm2.shape[0]):
                for ilat in range(Nsqm2.shape[1]):
                    for ilev in range(Nsqm2.shape[2]):
                        wstar2[icopy, ilat, ilev] = wstar[icopy, ilev, ilat]
        else:
            for ilat in range(Nsqm2.shape[0]):
                for ilev in range(Nsqm2.shape[1]):
                    wstar2[ilat, ilev] = wstar[ilev, ilat]

        X = Nsqm2 * wstar2

    # convert pressure levels to approximate log-pressure altitude
    zlev = H * np.log(p0 / lev)

    # X *should* have shape (copy x lat x lev) OR (lat x lev) - find the level axis
    if X.ndim == 3:
        levdim = 2
    if X.ndim == 2:
        levdim = 1
    # np.gradient (NumPy >= 1.13) accepts a 1-D coordinate array plus an axis
    # argument, which handles the unevenly spaced levels directly
    dxdz = np.gradient(X, zlev, axis=levdim)  # vertical gradient with respect to height

    # the above calculation yields a quantity in units s^-2/s, but it makes more sense
    # in the grand scheme of things to look at buoyancy forcing per day, so here
    # is a conversion factor.
    seconds_per_day = 60. * 60. * 24.0

    N2_forcing = -dxdz * seconds_per_day

    D = dict()
    D['data'] = N2_forcing
    D['lat'] = lat
    D['lev'] = lev
    D['units'] = 's^{-2}/day'
    D['long_name'] = 'N^{2} Forcing'

    return D