def aht_regional(run, lonin=(-1., 361.), period_fac=1.):
    """Plot and return a rolling mean of the cumulative meridional
    atmospheric heat transport for climatology `run`.

    Args:
        run: name of the climatology file (without .nc) to load.
        lonin: (west, east) longitude bounds; kept for interface
            compatibility — currently unused because the longitude
            subsetting step is disabled (was commented out).
        period_fac: scaling of the rolling-mean window; the window is
            int(5 * period_fac) time steps.

    Returns:
        Rolling-mean heat transport DataArray (xofyear, lat).
    """
    # Grid-cell areas in m^2 (cell_area gives areas on the unit sphere,
    # scale by a^2 — presumably; confirm against cell_area's contract)
    area = mc.a * mc.a * cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')

    # Load climatology and attach cell areas to the dataset
    data = xr.open_dataset('/scratch/rg419/Data_moist/climatologies/' + run + '.nc')
    data['area'] = (('lat', 'lon'), area)

    # Meridional moist static energy flux: cp*T*v + L*q*v + g*z*v
    vh = (data.vcomp_temp * mc.cp_air + data.sphum_v * mc.L
          + data.height * data.vcomp * mc.grav)
    dvhdy = gr.ddy(vh)

    # Vertically integrate (assumes uniform 5000 Pa layer spacing — TODO
    # confirm), weight by cell area, sum over longitude and accumulate
    # northward to get the transport.
    aht = ((dvhdy.sum('pfull') * 5000. / 9.8) * data.area).sum(('lon')).cumsum('lat')

    aht_rm = rolling_mean(aht, int(5 * period_fac))
    aht_rm.plot.contourf(x='xofyear', y='lat',
                         levels=np.arange(-1.5e16, 1.6e16, 1.e15))
    plt.show()
    return aht_rm
def ke_spinup(run, months, filename='atmos_pentad'):
    """Plot the spin-up of vertically integrated, area-mean kinetic energy.

    Reads run files for the half-open month range `months`, forms the
    area-weighted mean of u^2 + v^2 on each level, integrates in pressure,
    plots the series and saves the figure.

    Returns:
        The vertically integrated kinetic-energy time series.
    """
    # Build the list of per-month file paths to read
    path_template = '/scratch/rg419/Data_moist/' + run + '/run%03d/' + filename + '.nc'
    file_list = [path_template % month for month in range(months[0], months[1])]

    # Open the whole record lazily; the files carry no calendar, so do not
    # decode times, and chunk along time for manageable memory use.
    data = xr.open_mfdataset(file_list,
                             decode_times=False,
                             chunks={'time': 30})

    # Grid-cell areas, wrapped so they broadcast against the data
    grid_area = cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')
    weights = xr.DataArray(grid_area, [('lat', data.lat), ('lon', data.lon)])

    # Pressure thickness of each model layer
    layer_dp = xr.DataArray(np.diff(data.phalf), [('pfull', data.pfull)])

    # Area-weighted mean kinetic energy on each level
    ke_level_mean = ((data.ucomp**2. + data.vcomp**2.) * weights).sum(
        ('lat', 'lon')) / weights.sum(('lat', 'lon'))

    # Mass-weighted vertical integral (divide by g)
    ke_vint = (ke_level_mean * layer_dp).sum('pfull') / 9.8

    ke_vint.plot()
    plt.xlabel('Year')
    plt.ylabel('Vertically integrated area mean kinetic energy')
    plotname = '/scratch/rg419/plots/radiation_scheme/ke_spinup_' + run + '.png'
    plt.savefig(plotname)
    plt.close()

    return ke_vint
def ke_partition(run, months, filename='atmos_pentad', timeav='pentad',
                 period_fac=1.):
    """Partition kinetic energy into divergent and rotational parts over a
    box (5-30N, 60-150E) and plot both alongside area-mean precipitation.

    Uses VectorWind's Helmholtz decomposition. Saves KE_<run>.png and
    precip_<run>.png under plot_dir (defined elsewhere in this module).
    Returns None.
    """
    data = time_means(run, months, filename=filename, timeav=timeav,
                      period_fac=period_fac)

    # Total precipitation converted to mm/day
    totp = (data.convection_rain + data.condensation_rain) * 86400.

    # Grid-cell areas as a DataArray for area weighting
    cell_ar = cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')
    cell_ar = xr.DataArray(cell_ar, [('lat', data.lat), ('lon', data.lon)])

    # Helmholtz decomposition: (uchi, vchi) divergent, (upsi, vpsi) rotational
    uwnd = data.ucomp
    vwnd = data.vcomp
    w = VectorWind(uwnd, vwnd)
    uchi, vchi, upsi, vpsi = w.helmholtz()

    # Positional index lists (not coordinate labels) for the averaging box
    lats = [
        i for i in range(len(data.lat))
        if data.lat[i] >= 5. and data.lat[i] < 30.
    ]
    lons = [
        i for i in range(len(data.lon))
        if data.lon[i] >= 60. and data.lon[i] < 150.
    ]

    # Area-weighted mean KE of the divergent flow over the box
    # (positional indexing assumes dims are (time, pfull, lat, lon) — TODO confirm)
    ke_chi = 0.5 * (uchi * uchi + vchi * vchi) * cell_ar
    ke_chi_av = ke_chi[:, :, lats, lons].sum('lat').sum('lon') / cell_ar[
        lats, lons].sum('lat').sum('lon')

    # Area-weighted mean KE of the rotational flow over the box
    ke_psi = 0.5 * (upsi * upsi + vpsi * vpsi) * cell_ar
    ke_psi_av = ke_psi[:, :, lats, lons].sum('lat').sum('lon') / cell_ar[
        lats, lons].sum('lat').sum('lon')

    # Area-weighted mean precipitation over the box
    totp_av = (totp[:, lats, lons] * cell_ar[lats, lons]).sum('lat').sum(
        'lon') / cell_ar[lats, lons].sum('lat').sum('lon')

    # Plot KE at vertical level index 36 (level meaning not established
    # here — confirm against the model grid)
    ke_chi_av[:, 36].plot()
    ke_psi_av[:, 36].plot()
    plt.legend(['KE_chi', 'KE_psi'])
    plt.xlabel('Pentad')
    plt.ylabel('Kinetic Energy, m2/s2')
    plt.savefig(plot_dir + 'KE_' + run + '.png')
    plt.close()

    totp_av.plot()
    plt.xlabel('Pentad')
    plt.ylabel('Precipitation, mm/day')
    plt.savefig(plot_dir + 'precip_' + run + '.png')
    plt.close()
def aht_eq(run):
    """Estimate atmospheric heat transport at the Equator from the
    Southern-Hemisphere energy budget of climatology `run`.

    Each budget term is formed as an anomaly from its global area-weighted
    mean, then integrated over the Southern Hemisphere and scaled to PW.

    Returns:
        (ahteq, swabs, olr, shf, stor) — equatorial heat transport and the
        contributing SW absorption, OLR, surface heat flux and storage terms.
    """
    area = mc.a * mc.a * cell_area(
        42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')  # Area of grid cells

    #Load in data, add area to dataset
    data = xr.open_dataset('/scratch/rg419/Data_moist/climatologies/' + run +
                           '.nc')
    data['area'] = (('lat', 'lon'), area)

    #Locate latitudes North and South of Equator
    # NOTE(review): lats_nh is currently unused
    lats_sh = [data.lat[i] for i in range(len(data.lat)) if data.lat[i] <= 0]
    lats_nh = [data.lat[i] for i in range(len(data.lat)) if data.lat[i] >= 0]

    # Calculate upward longwave flux at surface
    flux_lw_up = data.t_surf**4. * mc.stefan

    # Take global averages of:
    # Net SW at TOA +ve down
    toa_sw_anom = data.toa_sw - (data.toa_sw * data.area).sum(
        ('lat', 'lon')) / data.area.sum(('lat', 'lon'))
    # Net SW at surface +ve down
    flux_sw_anom = data.flux_sw - (data.flux_sw * data.area).sum(
        ('lat', 'lon')) / data.area.sum(('lat', 'lon'))
    # OLR +ve up
    olr_anom = data.olr - (data.olr * data.area).sum(
        ('lat', 'lon')) / data.area.sum(('lat', 'lon'))
    # Net LW at surface +ve down
    flux_lw_anom = (data.flux_lw - flux_lw_up) - (
        (data.flux_lw - flux_lw_up) * data.area).sum(
            ('lat', 'lon')) / data.area.sum(('lat', 'lon'))
    # LH at surface +ve up
    flux_lhe_anom = data.flux_lhe - (data.flux_lhe * data.area).sum(
        ('lat', 'lon')) / data.area.sum(('lat', 'lon'))
    # SENS at surface +ve up
    flux_t_anom = data.flux_t - (data.flux_t * data.area).sum(
        ('lat', 'lon')) / data.area.sum(('lat', 'lon'))

    # Evaluate Atmospheric Heat Storage (AHS)
    # (column integral assumes uniform 5000 Pa layers — TODO confirm)
    ahs = gr.ddt(
        (data.temp * mc.cp_air + data.sphum * mc.L).sum('pfull') * 5000. /
        9.8)
    ahs_anom = ahs - (ahs * data.area).sum(('lat', 'lon')) / data.area.sum(
        ('lat', 'lon'))

    # Integrate each anomaly over the Southern Hemisphere, convert to PW
    swabs = ((toa_sw_anom - flux_sw_anom) * data.area).sel(lat=lats_sh).sum(
        ('lat', 'lon')) / 1.e15
    olr = (olr_anom * data.area).sel(lat=lats_sh).sum(('lat', 'lon')) / 1.e15
    shf = ((flux_t_anom + flux_lhe_anom - flux_lw_anom) *
           data.area).sel(lat=lats_sh).sum(('lat', 'lon')) / 1.e15
    stor = (ahs_anom * data.area).sel(lat=lats_sh).sum(('lat', 'lon')) / 1.e15

    # Transport at the Equator balances the SH-integrated budget residual
    ahteq = swabs - olr + shf - stor

    return ahteq, swabs, olr, shf, stor
def q_spinup(run, months, filename='atmos_pentad'):
    """Plot the spin-up of area-mean specific humidity.

    Produces two annual-mean series: the column integral over the whole
    atmosphere and the integral over levels at and above 100 hPa, saving a
    figure for each.

    Returns:
        (q_strat, q_vint) — stratospheric and full-column series.
    """
    # Per-month file paths for this run
    path_tmpl = '/scratch/rg419/Data_moist/' + run + '/run%03d/' + filename + '.nc'
    fnames = [path_tmpl % m for m in range(months[0], months[1])]

    # Open lazily; no calendar in the files, chunk along time
    data = xr.open_mfdataset(fnames,
                             decode_times=False,
                             chunks={'time': 30})

    # Annual means, assuming a 360-day year
    data.coords['year'] = data.time // 360 + 1
    yearly = data.groupby('year').mean(('time'))

    # Area weights and per-layer pressure thickness
    weights = xr.DataArray(
        cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/'),
        [('lat', data.lat), ('lon', data.lon)])
    dp = xr.DataArray(np.diff(data.phalf), [('pfull', data.pfull)])

    # Levels at and above 100 hPa
    p_strat = data.pfull[data.pfull <= 100.]

    # Area-mean specific humidity on each level
    q_av = (yearly.sphum * weights).sum(('lat', 'lon')) / weights.sum(
        ('lat', 'lon'))

    # Mass-weighted integrals: whole column, and stratospheric levels only
    q_vint = (q_av * dp).sum('pfull') / 9.8
    q_strat = (q_av * dp).sel(pfull=p_strat).sum('pfull') / 9.8

    q_strat.plot()
    plt.xlabel('Year')
    plt.ylabel('Vertically integrated area mean specific humidity, kg/m^2')
    plotname = '/scratch/rg419/plots/spinup/qstrat_spinup_' + run + '.png'
    plt.savefig(plotname)
    plt.close()

    q_vint.plot()
    plt.xlabel('Year')
    plt.ylabel('Vertically integrated area mean specific humidity, kg/m^2')
    plotname = '/scratch/rg419/plots/spinup/q_spinup_' + run + '.png'
    plt.savefig(plotname)
    plt.close()

    return q_strat, q_vint
def check_mom_balance(run, years):
    """Plot the area-mean momentum budget terms for `run` and return them.

    Loads the 2D annual momentum budget, takes the area-weighted horizontal
    mean of every term, and plots all terms on one labelled figure.
    """
    budget = mombudg_2d_an_fn(run, '16', years)

    # Area weights on the model grid
    weights = xr.DataArray(
        cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/'),
        [('lat', budget.lat), ('lon', budget.lon)])

    # Area-weighted horizontal mean of every variable in the budget dataset
    mom_budg_horiz_av = (budget * weights).sum(('lat', 'lon')) / weights.sum(
        ('lat', 'lon'))

    # Plot each term in turn; legend order must match this plotting order
    plt.figure()
    mom_budg_horiz_av.dphidx_av.plot()
    mom_budg_horiz_av.fv_av.plot()
    mom_budg_horiz_av.mom_mean.plot()
    mom_budg_horiz_av.mom_eddy.plot()
    mom_budg_horiz_av.ddamp_av.plot()
    mom_budg_horiz_av.mom_sum.plot()
    plt.legend(['dphidx', 'fv', 'mmmn', 'mmed', 'damp', 'sum'])
    plt.title(run)
    plt.ylim(-2e-5, 2e-5)

    return mom_budg_horiz_av
def flux_spinup_fn(run, months, filename='atmos_pentad'):
    """Plot annual-mean, area-mean surface SW and LW fluxes over spin-up.

    Saves sw_spinup_<run>.png and lw_spinup_<run>.png under
    /scratch/rg419/plots/spinup/. Returns None.
    """
    # File paths for the requested month range
    path_tmpl = '/scratch/rg419/Data_moist/' + run + '/run%03d/' + filename + '.nc'
    fnames = [path_tmpl % m for m in range(months[0], months[1])]

    # Open lazily; no calendar in the files, chunk along time
    data = xr.open_mfdataset(fnames,
                             decode_times=False,
                             chunks={'time': 30})

    # Annual means on a 360-day calendar
    data.coords['year'] = data.time // 360 + 1
    yearly = data.groupby('year').mean(('time'))

    # Area weights and their global sum
    weights = xr.DataArray(
        cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/'),
        [('lat', data.lat), ('lon', data.lon)])
    weight_total = weights.sum(('lat', 'lon'))

    # Area-weighted global means of the surface fluxes
    sw_av = (yearly.flux_sw * weights).sum(('lat', 'lon')) / weight_total
    lw_av = (yearly.flux_lw * weights).sum(('lat', 'lon')) / weight_total

    # Plot and save each series
    plt.figure(1)
    sw_av.plot()
    plt.xlabel('Year')
    plt.ylabel('Mean surface SW flux')
    plt.savefig('/scratch/rg419/plots/spinup/sw_spinup_' + run + '.png')
    plt.clf()

    plt.figure(2)
    lw_av.plot()
    plt.xlabel('Year')
    plt.ylabel('Mean surface LW flux')
    plt.savefig('/scratch/rg419/plots/spinup/lw_spinup_' + run + '.png')
    plt.clf()

    return
def flux_spinup_fn(run_fol, years):
    """Plot yearly area-mean surface SW and LW fluxes for each year in
    `years`, loading one year of data at a time via load_year_xr.

    Saves sw_spinup.png and lw_spinup.png under /scratch/rg419/plots/.
    Returns None. Uses Python 2 print syntax.
    """
    # NOTE(review): this redefines flux_spinup_fn — a function of the same
    # name is defined earlier in this file; the later definition wins.
    year = years[0]
    # Load the first year only to obtain the model grid coordinates
    rundata = load_year_xr(run_fol, year)
    area = cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')
    area_xr = xr.DataArray(area, [('lat', rundata.lat),
                                  ('lon', rundata.lon)])

    #Initialise arrays to load into
    sw_av = xr.DataArray(np.zeros((len(years))), [('year', years)])
    lw_av = xr.DataArray(np.zeros((len(years))), [('year', years)])

    for year in years:
        print year
        rundata = load_year_xr(run_fol, year)
        # Time-mean fluxes for this year
        sw = rundata.flux_sw.mean(('time'))
        lw = rundata.flux_lw.mean(('time'))
        #take area mean
        sw_in = sw * area_xr
        sw_av[year - years[0]] = sw_in.sum(('lat', 'lon')) / area_xr.sum(
            ('lat', 'lon'))
        lw_in = lw * area_xr
        lw_av[year - years[0]] = lw_in.sum(('lat', 'lon')) / area_xr.sum(
            ('lat', 'lon'))

    #plot timeseries of these
    plt.figure(1)
    plt.plot(sw_av)
    plt.xlabel('Year')
    plt.ylabel('Mean surface SW flux')
    plt.savefig('/scratch/rg419/plots/sw_spinup.png')
    plt.clf()

    plt.figure(2)
    plt.plot(lw_av)
    plt.xlabel('Year')
    plt.ylabel('Mean surface LW flux')
    plt.savefig('/scratch/rg419/plots/lw_spinup.png')
    plt.clf()

    return
def tsurf_ev(run, months):
    """Plot the global-mean surface temperature from daily files and return
    the time series (saves t_surf_ev_<run>.png)."""
    # Daily files for the requested month range
    path_tmpl = '/scratch/rg419/Data_moist/' + run + '/run%03d/atmos_daily.nc'
    fnames = [path_tmpl % m for m in range(months[0], months[1])]

    # Open lazily; no calendar in the files, chunk along time
    data = xr.open_mfdataset(fnames, decode_times=False, chunks={'time': 30})

    # Area weights on the model grid
    weights = xr.DataArray(
        cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/'),
        [('lat', data.lat), ('lon', data.lon)])

    # Area-weighted global-mean surface temperature
    t_surf_av = (data.t_surf * weights).sum(('lat', 'lon')) / weights.sum(
        ('lat', 'lon'))

    t_surf_av.plot()
    plt.xlabel('Day')
    plt.ylabel('Global mean temperature, K')
    plotname = '/scratch/rg419/plots/radiation_scheme/t_surf_ev_' + run + '.png'
    plt.savefig(plotname)
    plt.close()

    return t_surf_av
def tsurf_ev(run, months):
    """Plot the annual-mean, global-mean surface temperature from monthly
    files and return the time series (saves t_surf_ev_<run>.png)."""
    # NOTE(review): this redefines tsurf_ev — a daily-resolution version of
    # the same name appears earlier in this file; the later definition wins.
    #Load in dataset
    name_temp = '/scratch/rg419/Data_moist/' + run + '/run%03d/atmos_monthly.nc'
    names = [name_temp % m for m in range(months[0], months[1])]
    #read data into xarray
    data = xr.open_mfdataset(names, decode_times=False)

    # Annual means, assuming a 360-day year
    data.coords['year'] = data.time // 360 + 1
    data_yr = data.groupby('year').mean(('time'))

    # Area weights on the model grid
    area = cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')
    area_xr = xr.DataArray(area, [('lat', data.lat), ('lon', data.lon)])

    #take area mean of t_surf
    t_surf_av = (data_yr.t_surf * area_xr).sum(('lat', 'lon')) / area_xr.sum(
        ('lat', 'lon'))

    t_surf_av.plot()
    #plt.xlabel('Pentad')
    plt.ylabel('Global mean temperature, K')
    plotname = '/scratch/rg419/plots/radiation_scheme/t_surf_ev_' + run + '.png'
    plt.savefig(plotname)
    plt.close()

    return t_surf_av
def aht_zonal(run, period_fac=1.): area = mc.a * mc.a * cell_area( 42, '/scratch/rg419/GFDL_model/GFDLmoistModel/') # Area of grid cells #Load in data, add area to dataset data = xr.open_dataset('/scratch/rg419/Data_moist/climatologies/' + run + '.nc') data['area'] = (('lat', 'lon'), area) # Calculate upward longwave flux at surface flux_lw_up = data.t_surf**4. * mc.stefan # Evaluate Atmospheric Heat Storage (AHS) ahs = (gr.ddt( (data.temp * mc.cp_air + data.sphum * mc.L).sum('pfull') * 5000. / 9.8) * data.area).sum('lon') # SW absorbed by atmosphere swabs = ((data.toa_sw - data.flux_sw) * data.area).sum(('lon')) # Outgoing longwave olr = (data.olr * data.area).sum(('lon')) # Total upward surface heat flux shf = ((data.flux_t + data.flux_lhe - data.flux_lw + flux_lw_up) * data.area).sum(('lon')) aht_div = swabs - olr + shf - ahs aht = (aht_div - aht_div.mean('lat')).cumsum('lat') aht_rm = rolling_mean(aht, int(5 * period_fac)) aht_rm.plot.contourf(x='xofyear', y='lat', levels=np.arange(-1.5e16, 1.6e16, 1.e15)) plt.show() return aht_rm
def precip_centroid(run, period_fac=1.):
    """Evaluate the precipitation centroid at each pentad and plot it
    against the atmospheric heat transport at the Equator.

    Args:
        run: name of the climatology file (without .nc) to load.
        period_fac: year-length scaling used when binning pentads to months.

    Side effects:
        Saves aht_pcent_<run>.pdf under /scratch/rg419/plots/aht_work/.
    """
    area = cell_area(
        42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')  # Area of grid cells

    #Load in data, add area to dataset
    data = xr.open_dataset('/scratch/rg419/Data_moist/climatologies/' + run +
                           '.nc')
    data['area'] = (('lat', 'lon'), area)

    # Get total precip; fall back to the precomputed field when the
    # component rains are absent (narrowed from a bare except)
    try:
        data['precipitation'] = data.condensation_rain + data.convection_rain
    except AttributeError:
        data['precipitation'] = data.precipitation

    # Select latitudes over which to evaluate precip centroid
    lat_bound = 20.
    lats = [
        data.lat[i] for i in range(len(data.lat))
        if data.lat[i] >= -lat_bound and data.lat[i] <= lat_bound
    ]

    # Integrate precip wrt longitude
    precip_area_lats = (data.precipitation.sel(lat=lats) *
                        data.area.sel(lat=lats)).sum('lon').values

    # Interpolate precip onto a 0.1-degree latitude grid
    f = spint.interp1d(lats,
                       precip_area_lats,
                       axis=1,
                       fill_value='extrapolate')
    lats_new = np.arange(-lat_bound, lat_bound + 0.1, 0.1)
    p_new = f(lats_new)
    p_new = xr.DataArray(p_new,
                         coords=[data.xofyear.values, lats_new],
                         dims=['xofyear', 'lat'])

    # Calculate cumulative sum of precip with latitude
    p_area_int = p_new.cumsum('lat')

    # At each time find the precipitation centroid: the latitude at which
    # half of the area-integrated precip lies North/South
    # (assumes xofyear runs 1..N — TODO confirm)
    p_cent = np.zeros((len(p_new.xofyear.values), ))
    for i in range(1, len(p_new.xofyear.values) + 1):
        p_cent[i - 1] = p_new.lat[p_area_int.sel(
            xofyear=i) <= 0.5 * p_area_int.sel(xofyear=i).max('lat')].max(
                'lat').values
    p_cent = xr.DataArray(p_cent,
                          coords=[p_new.xofyear.values],
                          dims=['xofyear'])

    # Calculate atmospheric heat transport at the equator
    ahteq = aht_eq(run)[0]

    # Calculate monthly averages (stray debug print removed)
    ahteq.coords['month'] = np.mod(ahteq.xofyear - 1,
                                   72. * period_fac) // (6 * period_fac) + 1
    ahteq_month = ahteq.groupby('month').mean(('xofyear'))
    p_cent.coords['month'] = np.mod(p_cent.xofyear - 1,
                                    72. * period_fac) // (6 * period_fac) + 1
    p_cent_month = p_cent.groupby('month').mean(('xofyear'))

    # Plot pentad values plus monthly means labelled by month name
    # (month_list is defined elsewhere in this module)
    plt.plot(ahteq, p_cent, 'xk', alpha=0.7)
    plt.plot(ahteq_month, p_cent_month, 'xk', ms=7, mew=2)
    for i in range(0, len(month_list)):
        plt.text(ahteq_month[i] + 0.1, p_cent_month[i] + 0.1, month_list[i],
                 fontsize=14)
    plt.xlabel('Atmospheric heat transport at the Equator (PW)')
    plt.ylabel('Precipitation centroid latitude ($^{\circ}$)')
    plt.grid(True, linestyle=':')
    plt.ylim([-15, 15])
    plt.xlim([-10, 10])
    plt.savefig('/scratch/rg419/plots/aht_work/aht_pcent_' + run + '.pdf',
                format='pdf')
    plt.close()
def rad_eq_t(run, pentad, lev=150, period_fac=1.):
    """Plot radiative-equilibrium and surface temperatures for `run`, plus
    the seasonal cycle of the max-minus-hemispheric-min temperature contrast.

    Args:
        run: climatology name to load.
        pentad: time index at which delta_t_S is printed.
        lev: NOTE(review) currently unused — kept for interface compatibility.
        period_fac: scaling of tick spacing for longer/shorter years.

    Side effects: saves several PDFs under plot_dir and prints summary
    values (Python 2 print syntax).
    """
    rcParams['figure.figsize'] = 15, 6.25
    rcParams['font.size'] = 18
    rcParams['text.usetex'] = True

    plot_dir = '/scratch/rg419/plots/crit_lat_test/'
    mkdir = sh.mkdir.bake('-p')
    mkdir(plot_dir)

    area = cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')

    #Load in data
    data = xr.open_dataset('/scratch/rg419/Data_moist/climatologies/' + run +
                           '.nc')
    data['area'] = (('lat', 'lon'), area)

    # Radiative equilibrium temperature from net TOA shortwave
    stefan = 5.6734e-8
    t_rad_eq = (data.toa_sw / stefan)**(1. / 4.)

    # Month-name tick labels
    # ((k+5)/6 relies on Python 2 integer division — TODO confirm)
    mn_dic = month_dic(1)
    tickspace = np.arange(13, 72, 18) * period_fac
    labels = [mn_dic[(k + 5) / 6] for k in range(13, 72, 18)]
    # NOTE(review): `levels` below is unused — each plot passes its own levels
    levels = np.arange(-1.5, 1.6, 0.25)

    # Hovmoller of zonal-mean radiative-equilibrium temperature
    t_rad_eq.mean('lon').plot.contourf(x='xofyear',
                                       y='lat',
                                       extend='both',
                                       add_labels=False,
                                       levels=np.arange(0., 301., 10.))
    plt.ylabel('Latitude')
    plt.xlabel('')
    plt.yticks(np.arange(-60, 61, 30))
    plt.xticks(tickspace, labels, rotation=25)
    plt.title('T, K', fontsize=17)
    plt.grid(True, linestyle=':')
    plt.tight_layout()
    figname = 't_rad_eq_' + run + '.pdf'
    plt.savefig(plot_dir + figname, format='pdf')
    plt.close()

    # Time- and zonal-mean radiative-equilibrium temperature profile
    t_rad_eq.mean(('lon', 'xofyear')).plot()
    plt.ylabel('Latitude')
    plt.xlabel('Radiative equilibrium temperature')
    figname = 't_rad_eq_mean_' + run + '.pdf'
    plt.savefig(plot_dir + figname, format='pdf')
    plt.close()

    # Hovmoller of zonal-mean surface temperature
    data.t_surf.mean('lon').plot.contourf(x='xofyear',
                                          y='lat',
                                          extend='both',
                                          add_labels=False,
                                          levels=np.arange(240., 311., 5.))
    plt.ylabel('Latitude')
    plt.xlabel('')
    plt.yticks(np.arange(-60, 61, 30))
    plt.xticks(tickspace, labels, rotation=25)
    plt.title('T, K', fontsize=17)
    plt.grid(True, linestyle=':')
    plt.tight_layout()
    figname = 't_surf_' + run + '.pdf'
    plt.savefig(plot_dir + figname, format='pdf')
    plt.close()

    # Temperature contrast: global max minus each hemisphere's minimum
    # (positional split assumes 64 latitudes: 0-31 SH, 32-63 NH — TODO confirm)
    delta_t_N = data.t_surf.mean('lon').max('lat') - data.t_surf.mean(
        'lon').isel(lat=range(32, 64)).min('lat')
    delta_t_S = data.t_surf.mean('lon').max('lat') - data.t_surf.mean(
        'lon').isel(lat=range(0, 32)).min('lat')

    # Area-weighted, time-mean surface temperature
    t_mean = ((data.t_surf * data.area).sum(('lat', 'lon')) /
              data.area.sum(('lat', 'lon'))).mean('xofyear')

    print run, t_mean.values, delta_t_S[pentad].values

    delta_t_N.plot(color='k')
    delta_t_S.plot(color='b')
    plt.xlabel('')
    plt.xticks(tickspace, labels, rotation=25)
    plt.ylabel('delta T, K')
    plt.ylim([5, 65])
    plt.grid(True, linestyle=':')
    plt.tight_layout()
    figname = 'deltaT_' + run + '.pdf'
    plt.savefig(plot_dir + figname, format='pdf')
    plt.close()
'r', format='NETCDF3_CLASSIC') lons = resolution_file.variables['lon'][:] lats = resolution_file.variables['lat'][:] lonbs = resolution_file.variables['lonb'][:] latbs = resolution_file.variables['latb'][:] nlon = lons.shape[0] nlat = lats.shape[0] nlonb = lonbs.shape[0] nlatb = latbs.shape[0] area = cell_area( 42, '/scratch/rg419/GFDL_model/GFDLmoistModel/') # Area of grid cells warmpool_loc_list = [ [0., 95.] #, [5., 95.], [10., 95.], [15., 95.], [20., 95.], [25., 95.] ] for file_values in warmpool_loc_list: warmpool_array = np.zeros([nlat, nlon]) warmpool_lat_centre = file_values[0] warmpool_lon_centre = file_values[1] print warmpool_lat_centre, warmpool_lon_centre warmpool_width = 7.5 #15.
def precip_centroid(run, lat_bound=45., lonin=(-1., 361.)):
    """Evaluate the precipitation centroid at each pentad.

    The centroid is the latitude at which half of the area-integrated
    precipitation (within +/-lat_bound and the lonin window) lies South.
    NOTE(review): a different precip_centroid signature also exists earlier
    in this file; the later definition wins at import.

    Args:
        run: climatology name to load.
        lat_bound: latitude range over which to evaluate the centroid.
        lonin: (west, east) longitude window; if east < west the window
            wraps across 0/360.

    Returns:
        DataArray of centroid latitude vs xofyear.
    """
    area = cell_area(
        42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')  # Area of grid cells

    #Load in data, add area to dataset
    data = xr.open_dataset('/scratch/rg419/Data_moist/climatologies/' + run +
                           '.nc')
    data['area'] = (('lat', 'lon'), area)

    # Longitudes inside the requested window (handles wrap-around)
    if lonin[1] > lonin[0]:
        lons = [
            data.lon[i] for i in range(len(data.lon))
            if data.lon[i] >= lonin[0] and data.lon[i] < lonin[1]
        ]
    else:
        lons = [
            data.lon[i] for i in range(len(data.lon))
            if data.lon[i] >= lonin[0] or data.lon[i] < lonin[1]
        ]

    # Get total precip; fall back to the precomputed field when the
    # component rains are absent (narrowed from a bare except)
    try:
        data['precipitation'] = data.condensation_rain + data.convection_rain
    except AttributeError:
        data['precipitation'] = data.precipitation

    # Select latitudes over which to evaluate precip centroid
    lats = [
        data.lat[i] for i in range(len(data.lat))
        if data.lat[i] >= -lat_bound and data.lat[i] <= lat_bound
    ]

    # Integrate precip wrt longitude over the selected window
    precip_area_lats = (data.precipitation.sel(lat=lats) *
                        data.area.sel(lat=lats)).sel(
                            lon=lons).sum('lon').values

    # Interpolate precip onto a 0.1-degree latitude grid
    f = spint.interp1d(lats,
                       precip_area_lats,
                       axis=1,
                       fill_value='extrapolate')
    lats_new = np.arange(-lat_bound, lat_bound + 0.1, 0.1)
    p_new = xr.DataArray(f(lats_new),
                         coords=[data.xofyear.values, lats_new],
                         dims=['xofyear', 'lat'])

    # Cumulative sum of precip with latitude
    p_area_int = p_new.cumsum('lat')

    # At each time find the latitude at which half of the area-integrated
    # precip lies North/South (assumes xofyear runs 1..N — TODO confirm)
    p_cent = np.zeros((len(p_new.xofyear.values), ))
    for i in range(1, len(p_new.xofyear.values) + 1):
        p_cent[i - 1] = p_new.lat[p_area_int.sel(
            xofyear=i) <= 0.5 * p_area_int.sel(xofyear=i).max('lat')].max(
                'lat').values
    p_cent = xr.DataArray(p_cent,
                          coords=[p_new.xofyear.values],
                          dims=['xofyear'])

    return p_cent
def spinup_fn(run, field, months_list, filenames=('atmos_pentad', ),
              plevs=(0., 2000., 'all')):
    """Plot the annual-mean, area-mean spin-up time series of `field`.

    Args:
        run: run name under /scratch/rg419/Data_moist/.
        field: name of the variable to average.
        months_list: list of (start, end) month ranges, one per entry of
            filenames.
        filenames: file stems to read (eg. atmos_monthly, atmos_pentad).
        plevs: (bottom, top, label) — pressure bounds for the vertical
            integral of 3D fields, plus a label used in the plot filename.

    Returns:
        The area-mean (and, for 3D fields, vertically integrated) annual
        time series of `field`.
    """

    # Function to open files for a specfied month range and filename.
    # Takes annual means (360-day calendar).
    def open_files(run, months, filename):
        name_temp = '/scratch/rg419/Data_moist/' + run + '/run%03d/' + filename + '.nc'
        names = [name_temp % m for m in range(months[0], months[1])]
        #read data into xarray
        data = xr.open_mfdataset(names,
                                 decode_times=False,
                                 chunks={'time': 30})
        data.coords['year'] = data.time // 360 + 1
        field_yr = data[field].groupby('year').mean(('time'))
        return field_yr, data

    # Combine data from files with different names (eg. atmos_monthly and
    # atmos_pentad) into one time series
    arrays = []
    for i, filename in enumerate(filenames):
        field_yr, data = open_files(run, months_list[i], filename)
        arrays.append(field_yr)
    field_yr = xr.concat(arrays, dim='year')

    # Check if data is 3D and if so integrate over specfied levels.
    # Narrowed from a bare except: only a missing pfull/phalf coordinate
    # marks the field as 2D.
    try:
        p_levs = data.pfull[(data.pfull >= plevs[0]) & (data.pfull <= plevs[1])]
        dp = xr.DataArray(np.diff(data.phalf),
                          [('pfull', field_yr.pfull)]) * 100.
        field_yr = (field_yr * dp).sel(pfull=p_levs).sum('pfull') / 9.8
        print('3D field, vertical integral taken')
        three_d = True
    except (AttributeError, KeyError):
        print('2D field')
        three_d = False

    # Calculate cell areas and take area mean
    area = cell_area(42, '/scratch/rg419/GFDL_model/GFDLmoistModel/')
    area_xr = xr.DataArray(area, [('lat', data.lat), ('lon', data.lon)])
    field_av = (field_yr * area_xr).sum(('lat', 'lon')) / area_xr.sum(
        ('lat', 'lon'))

    # Plot up result and save
    field_av.plot()
    plt.xlabel('Year')
    plt.ylabel(field)
    if three_d:
        plotname = ('/scratch/rg419/plots/spinup/' + field + '_' +
                    str(plevs[2]) + '_spinup_' + run + '.png')
    else:
        plotname = '/scratch/rg419/plots/spinup/' + field + '_spinup_' + run + '.png'
    plt.savefig(plotname)
    plt.close()

    return field_av