Example #1
def load_vars(year):
    try:
        melt_flux = iris.load_cube(filepath + year + '_land_snow_melt_flux.nc',
                                   'Snow melt heating flux')  # W m-2
        melt_amnt = iris.load_cube(filepath + year + '_land_snow_melt_amnt.nc',
                                   'Snowmelt')  # kg m-2 TS-1 (TS = 100 s)
        melt_amnt = iris.analysis.maths.multiply(
            melt_amnt, 108.
        )  # 10800 s in 3 hrs / 100 s in a model timestep = 108 ==> melt amount per output timestep
        melt_rate = iris.load_cube(filepath + year + '_land_snow_melt_rate.nc',
                                   'Rate of snow melt on land')
        orog = iris.load_cube(filepath + 'orog.nc')
        orog = orog[0, 0, :, :]
        LSM = iris.load_cube(filepath + 'new_mask.nc')
        lsm = LSM[0, 0, :, :]
    except iris.exceptions.ConstraintMismatchError:
        print('Files not found')
    var_list = [melt_rate, melt_amnt, melt_flux, lsm, orog]
    for i in var_list:
        real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
    vars_yr = {
        'melt_flux': melt_flux[:-4, 0, :, :],
        'melt_rate': melt_rate[:-4, 0, :, :],
        'melt_amnt': melt_amnt[:-4, 0, :, :],
        'orog': orog,
        'lsm': lsm,
        'lon': real_lon,
        'lat': real_lat,
        'year': year
    }
    return vars_yr
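
# Hedged usage sketch (not part of the original script): assuming `filepath` and
# `rotate_data` are defined at module level as in the rest of this project, the
# returned dictionary gives rotated melt cubes for one hindcast year.
vars_1998 = load_vars('1998')
cumulative_melt = vars_1998['melt_amnt'].collapsed('time', iris.analysis.SUM)  # total melt over the year (kg m-2)
print(vars_1998['year'], cumulative_melt.data.max())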
Example #2
def load_time_srs():
    melt = iris.load_cube('1998-2017_land_snow_melt_amnt_daymn.nc', 'Snowmelt')
    #melt.coord('time').convert_units('days since 1970-01-01 00:00:00', calendar = 'standard')
    Ts = iris.load_cube('1998-2017_Ts_monmean.nc', 'surface_temperature')
    Ts_max = iris.load_cube('1998-2017_Ts_monmax.nc', 'surface_temperature')
    Ts_min = iris.load_cube('1998-2017_Ts_monmin.nc', 'surface_temperature')
    Tair = iris.load_cube('1998-2017_Tair_1p5m_monmean.nc', 'air_temperature')
    Tair_max = iris.load_cube('1998-2017_Tair_1p5m_monmax.nc', 'air_temperature')
    Tair_min = iris.load_cube('1998-2017_Tair_1p5m_monmin.nc', 'air_temperature')
    for i in [Ts, Ts_max, Ts_min, Tair, Tair_min, Tair_max]:
        i.convert_units('celsius')
    RH = iris.load_cube('1998-2017_RH_1p5m_monmean.nc', 'relative_humidity')
    RH_min = iris.load_cube('1998-2017_RH_1p5m_monmin.nc', 'relative_humidity')
    RH_max = iris.load_cube('1998-2017_RH_1p5m_monmax.nc', 'relative_humidity')
    FF_10m = iris.load_cube('1998-2017_FF_10m_monmean.nc', 'wind_speed')
    FF_min = iris.load_cube('1998-2017_FF_10m_monmin.nc', 'wind_speed')
    FF_max = iris.load_cube('1998-2017_FF_10m_monmax.nc', 'wind_speed')
    FF_10m = FF_10m[:,:, :220, :220]
    FF_min = FF_min[:, :, :220, :220]
    FF_max = FF_max[:, :, :220, :220]
    # Rotate data onto standard lat/lon grid
    for i in [melt, Ts, Tair, RH, FF_10m]:
        real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
    var_dict = {'melt': melt[:, 0, :, :], 'Ts': Ts[:, 0, :, :], 'Ts_max': Ts_max[:, 0, :, :], 'Ts_min': Ts_min[:, 0, :, :],
                'FF_10m': FF_10m[:, 0, :, :], 'FF_10m_min': FF_min[:, 0, :, :], 'FF_10m_max': FF_max[:, 0, :, :],
                'RH': RH[:, 0, :, :], 'RH_min': RH_min[:, 0, :, :], 'RH_max': RH_max[:, 0, :, :],
                'Time_srs': Ts.coord('time').points, 'Tair': Tair[:, 0, :, :], 'Tair_max': Tair_max[:, 0, :, :],
                'Tair_min': Tair_min[:, 0, :, :], 'lat': real_lat, 'lon': real_lon}
    return var_dict
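
# Hedged usage sketch (assumes matplotlib.pyplot is imported as plt and that the
# monthly-mean files above sit in the working directory): plot a domain-mean
# surface-temperature series from the returned dictionary.
srs = load_time_srs()
Ts_mean_srs = srs['Ts'].data.mean(axis=2).mean(axis=1)  # average over the two spatial axes
plt.plot(srs['Time_srs'], Ts_mean_srs)
plt.ylabel('Surface temperature (degrees C)')
plt.show()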
Example #3
def melt_srs():
    if host == 'jasmin':
        filepath = '/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/output/alloutput/'
        ancil_path = '/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/output/'
        lsm_name = 'land_binary_mask'
    elif host == 'bsl':
        filepath = '/data/mac/ellgil82/hindcast/output/'
        ancil_path = filepath
        lsm_name = 'LAND MASK (No halo) (LAND=TRUE)'
    try:
        orog = iris.load_cube(ancil_path + 'orog.nc', 'surface_altitude')
        orog = orog[0, 0, :, :]
        LSM = iris.load_cube(ancil_path + 'new_mask.nc', lsm_name)
        LSM = LSM[0, 0, :, :]
    except iris.exceptions.ConstraintMismatchError:
        print('Files not found')
    tot_melt = iris.load_cube('1998-2017_land_snow_melt_amnt.nc')
    tot_melt = tot_melt[0]
    real_lon, real_lat = rotate_data(tot_melt, 1, 2)
    srs_vars = {
        'real_lon': real_lon,
        'real_lat': real_lat,
        'tot_melt': tot_melt,
        'orog': orog,
        'lsm': LSM
    }
    return srs_vars
Example #4
def load_vars(year):
	# Load surface variables
	Tair = iris.load_cube( filepath + year+'_Tair_1p5m.nc', 'air_temperature')
	Ts = iris.load_cube( filepath + year+'_Ts.nc', 'surface_temperature')
	MSLP = iris.load_cube( filepath + year+'_MSLP.nc', 'air_pressure_at_sea_level')
	sfc_P = iris.load_cube(filepath  + year + '_sfc_P.nc', 'surface_air_pressure')
	FF_10m = iris.load_cube( filepath +year+'_FF_10m.nc', 'wind_speed')
	RH = iris.load_cube(filepath  + year + '_RH_1p5m.nc', 'relative_humidity')
	u = iris.load_cube(filepath  + year + '_u_10m.nc', 'x wind component (with respect to grid)')
	v = iris.load_cube(filepath  + year + '_v_10m.nc', 'y wind component (with respect to grid)')
	# Load profiles
	theta_prof = iris.load_cube(filepath + year + '_theta_full_profile.nc')
	theta_prof = theta_prof[:,:40,:,:]
	u_prof = iris.load_cube(filepath + year + '_u_wind_full_profile.nc')
	v_prof = iris.load_cube(filepath + year + '_v_wind_full_profile.nc')
	v_prof = v_prof[:,:, 1:,:]
	theta_pp = iris.load_cube(filepath + '*pe000.pp', 'air_potential_temperature')
	theta_pp = theta_pp[:,:40,:,:]
	if host == 'bsl':
		try:
			LSM = iris.load_cube(filepath + 'new_mask.nc', 'LAND MASK (No halo) (LAND=TRUE)')
			orog = iris.load_cube(filepath + 'orog.nc', 'surface_altitude')
			orog = orog[0, 0, :, :]
			LSM = LSM[0, 0, :, :]
		except iris.exceptions.ConstraintMismatchError:
			print('Files not found')
	elif host == 'jasmin':
		try:
			LSM = iris.load_cube(filepath + 'new_mask.nc', 'land_binary_mask')
			orog = iris.load_cube(filepath + 'orog.nc', 'surface_altitude')
			orog = orog[0, 0, :, :]
			LSM = LSM[0, 0, :, :]
		except iris.exceptions.ConstraintMismatchError:
			print('Files not found')
	# Rotate data onto standard lat/lon grid
	for i in [theta_prof, theta_pp, u_prof, v_prof, orog]:
		real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
	# Convert model levels to altitude
	# Take orography data and use it to create hybrid height factory instance
	auxcoord = iris.coords.AuxCoord(orog.data, standard_name=str(orog.standard_name), long_name="orography",
									var_name="orog", units=orog.units)
	for x in [theta_prof, u_prof, v_prof]:
		x.add_aux_coord(auxcoord, (np.ndim(x) - 2, np.ndim(x) - 1))
		x.add_aux_coord(theta_pp.coord('sigma'), 1)
		x.add_aux_coord(theta_pp.coord('level_height'), 1)
		factory = iris.aux_factory.HybridHeightFactory(sigma=x.coord("sigma"), delta=x.coord("level_height"), orography=x.coord("surface_altitude"))
		x.add_aux_factory(factory)  # this should produce a 'derived coordinate', 'altitude' (test this with >>> print theta)
    #Tair.convert_units('celsius')
	#Ts.convert_units('celsius')
	#MSLP.convert_units('hPa')
	#sfc_P.convert_units('hPa')
	FF_10m = FF_10m[:,:,1:,:]
	v = v[:,:,1:,:]
	WD = metpy.calc.wind_direction(u = u.data, v = v.data)
	WD = iris.cube.Cube(data = WD, standard_name='wind_from_direction')
	surf_vars_yr = {'Tair': Tair[:,0,:,:], 'Ts': Ts[:,0,:,:], 'MSLP': MSLP[:,0,:,:], 'sfc_P': sfc_P[:,0,:,:], 'FF_10m': FF_10m[:,0,:,:],
               'RH': RH[:,0,:,:], 'WD': WD[:,0,:,:], 'lon': real_lon, 'lat': real_lat, 'year': year}
	prof_vars_yr = {'lon': real_lon, 'lat': real_lat, 'year': year, 'theta': theta_prof, 'u': u_prof, 'v': v_prof, 'altitude': theta_prof.coord('altitude'), 'orog': orog, 'lsm': LSM}
	return surf_vars_yr, prof_vars_yr
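
# Hedged check (per the comment above, a quick way to confirm that the hybrid-height
# factory has produced a derived 'altitude' coordinate on the profile cubes;
# `filepath` and `host` are assumed to be set earlier in the script):
surf_98, prof_98 = load_vars('1998')
print(prof_98['theta'])                    # 'altitude' should appear as a derived coordinate
print(prof_98['altitude'].points.shape)    # roughly (model level, lat, lon) altitudes in metres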
Example #5
def load_vars(file):
	T_air = iris.load_cube(file, 'air_temperature')
	T_surf = iris.load_cube(file, 'surface_temperature')
	T_air.convert_units('celsius')
	T_surf.convert_units('celsius')
	u = iris.load_cube(file, 'x_wind')
	v = iris.load_cube(file, 'y_wind')
	v = v[:,:400]
	old_lsm = iris.load_cube('/data/clivarm/wip/ellgil82/May_2016/Compare/CS1/km1p5/20160525T1200Z_Peninsula_km1p5_ctrl_pa000.pp', 'land_binary_mask')
	old_orog = iris.load_cube('/data/clivarm/wip/ellgil82/May_2016/Compare/CS1/km1p5/20160525T1200Z_Peninsula_km1p5_ctrl_pa000.pp', 'surface_altitude')
	new_lsm = iris.load_cube('/data/clivarm/wip/ellgil82/May_2016/Re-runs/CS2/20160522T1200Z_Peninsula_km1p5_Smith_tnuc_pa000.pp', 'land_binary_mask')
	new_orog = iris.load_cube('/data/clivarm/wip/ellgil82/May_2016/Re-runs/CS2/20160522T1200Z_Peninsula_km1p5_Smith_tnuc_pa000.pp', 'surface_altitude')
	rotate_me = [T_air, T_surf, u, v]
	for i in rotate_me:
		real_lon, real_lat = rotate_data(i, 1, 2)
	me_too = [new_lsm, new_orog, old_lsm, old_orog]
	for j in me_too:
		real_lon, real_lat = rotate_data(j, 0, 1)
	return T_air[0,:,:], T_surf[0,:,:], u[0,:,:], v[0,:,:], new_orog, new_lsm, old_orog, old_lsm,  real_lon, real_lat
Example #6
def load_vars():
    try:
        melt_flux = iris.load_cube(filepath + 'LarsenB_land_snow_melt_flux.nc',
                                   'Snow melt heating flux')  # W m-2
        melt_amnt = iris.load_cube(filepath + 'LarsenB_land_snow_melt_amnt.nc',
                                   'Snowmelt')  # kg m-2 TS-1 (TS = 100 s)
        melt_amnt = iris.analysis.maths.multiply(
            melt_amnt, 108.
        )  # 10800 s in 3 hrs / 100 s in a model timestep = 108 ==> melt amount per output timestep
        HL = iris.load_cube(filepath + 'LarsenB_latent_heat.nc')
        HS = iris.load_cube(filepath + 'LarsenB_sensible_heat.nc')
        HL = iris.analysis.maths.multiply(HL, -1.)
        HS = iris.analysis.maths.multiply(HS, -1.)
        SWnet = iris.load_cube(filepath + 'LarsenB_surface_SW_net.nc')
        LWnet = iris.load_cube(filepath + 'LarsenB_surface_LW_net.nc')
        Etot = HL.data + HS.data + SWnet.data + LWnet.data
        Ts = iris.load_cube(filepath + 'LarsenB_Ts.nc')
        Tair = iris.load_cube(filepath + 'LarsenB_Tair_1p5m.nc')
        Tair.convert_units('celsius')
        Ts.convert_units('celsius')
        u = iris.load_cube(filepath + 'LarsenB_u_10m.nc')
        v = iris.load_cube(filepath + 'LarsenB_v_10m.nc')
        v = v[:, :, 1:, :]
        MSLP = iris.load_cube(filepath + 'LarsenB_MSLP.nc')
        MSLP.convert_units('hPa')
        orog = iris.load_cube(filepath + 'orog.nc')
        orog = orog[0, 0, :, :]
        LSM = iris.load_cube(filepath + 'lsm.nc')
        lsm = LSM[0, 0, :, :]
    except iris.exceptions.ConstraintMismatchError:
        print('Files not found')
    var_list = [
        SWnet, LWnet, HL, HS, melt_amnt, melt_flux, lsm, orog, Ts, u, v, MSLP
    ]
    for i in var_list:
        real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
    vars_yr = {
        'melt_flux': melt_flux[:, 0, :, :],
        'melt_amnt': melt_amnt[:, 0, :, :],
        'HL': HL[:, 0, :, :],
        'HS': HS[:, 0, :, :],
        'Etot': Etot[:, 0, :, :],
        'Ts': Ts[:, 0, :, :],
        'Tair': Tair[:, 0, :, :],
        'u': u[:, 0, :, :],
        'v': v[:, 0, :, :],
        'MSLP': MSLP[:, 0, :, :],
        'LWnet': LWnet[:, 0, :, :],
        'SWnet': SWnet[:, 0, :, :],
        'orog': orog,
        'lsm': lsm,
        'lon': real_lon,
        'lat': real_lat
    }
    return vars_yr
Example #7
def orog_dif_plot():
	'''Plot spatial differences between MetUM model output using default and updated orography and coastline files during
	foehn and non-foehn conditions. Thesis Figure 4.8. '''
	# Load necessary files
	old_orog = iris.load_cube('/data/clivarm/wip/ellgil82/new_ancils/km1p5/orog/orog_original.nc', 'OROGRAPHY (/STRAT LOWER BC)')[0,0,:,:]
	new_orog = iris.load_cube('/data/clivarm/wip/ellgil82/new_ancils/km1p5/orog/new_orog_smoothed.nc', 'Height')[0, 0,:, :]
	old_lsm = iris.load_cube('/data/clivarm/wip/ellgil82/new_ancils/km1p5/lsm/lsm_original.nc', 'LAND MASK (No halo) (LAND=TRUE)')[0, 0, :, :]
	new_lsm = iris.load_cube('/data/clivarm/wip/ellgil82/new_ancils/km1p5/lsm/new_mask.nc', 'LAND MASK (No halo) (LAND=TRUE)')[0,0,:,:]
	# Set up figure
	fig, ax = plt.subplots(1, 1, figsize=(10,11.5))
	ax.spines['right'].set_visible(False)
	ax.spines['left'].set_visible(False)
	ax.spines['top'].set_visible(False)
	ax.spines['bottom'].set_visible(False)
	ax.tick_params(axis='both', which='both', length=0, labelbottom='off', labelleft='off')
	# Plot Cabinet Inlet AWS
	cube = iris.load_cube('/data/clivarm/wip/ellgil82/May_2016/Re-runs/CS2/20160522T1200Z_Peninsula_km1p5_ctrl_pa000.pp','surface_altitude')
	real_lon, real_lat = rotate_data(cube, 0, 1)
	ax.plot(-63.37105, -66.48272, markersize=15, marker='o', color='#f68080', zorder=10)
	# Calculate differences
	orog_dif = new_orog.data - old_orog.data
	lsm_dif = new_lsm.data - old_lsm.data
	# Mask data where no difference is seen
	orog_dif = ma.masked_where((lsm_dif == 0) & (new_lsm.data == 0), orog_dif)
	lsm_dif = ma.masked_where(lsm_dif == 0, lsm_dif)
	# Truncate colormap to minimise visual impact of one or two extreme values
	squished_bwr = shiftedColorMap(cmap=matplotlib.cm.bwr, min_val=-800, max_val=800, name='squished_bwr', var=orog_dif, start = .15, stop = .85)
	# Plot differences between old and new orography and LSM
	c = ax.pcolormesh(real_lon, real_lat, orog_dif, cmap='squished_bwr', vmin=-800, vmax=800, zorder = 1)
	lsm = ax.contourf(real_lon, real_lat, lsm_dif, cmap = 'bwr', vmax = 1, vmin = -1, zorder = 2)
	# Add new LSM and 100 m orography contour
	ax.contour(real_lon, real_lat, new_lsm.data, linewidths=3, colors='dimgrey', zorder = 3)
	ax.contour(real_lon, real_lat, new_orog.data, linewidths = 2, levels = [100], colors = 'dimgrey', zorder = 4)
	# Set up colour bar
	cbaxes = fig.add_axes([0.22, 0.12, 0.56, 0.03])
	cbticks = np.linspace(-800, 800, 4)
	cbticklabs = [-800, 0, 800]
	cb = plt.colorbar(c, cax=cbaxes, orientation='horizontal', ticks=cbticks)
	cb.set_ticks(cbticks, cbticklabs)
	cb.ax.set_xlabel('Surface elevation difference (m)', fontsize=30, labelpad=20, color='dimgrey')
	cb.ax.text(-0.3, 2.2, 'Area removed \nfrom new LSM', fontsize = 30, color = 'dimgrey')
	cb.ax.text(0.78, 2.2, 'Area added \nto new LSM', fontsize = 30, color = 'dimgrey')
	cb.outline.set_edgecolor('dimgrey')
	cb.outline.set_linewidth(2)
	cb.solids.set_edgecolor('face')
	cb.ax.tick_params(labelsize=30, tick1On=False, tick2On=False, labelcolor='dimgrey', pad=10)
	[l.set_visible(False) for (w, l) in enumerate(cb.ax.xaxis.get_ticklabels()) if w % 3 != 0]
	plt.subplots_adjust(bottom=0.27, left=0.11, right=0.89, top=0.95, hspace=0.05)
	plt.savefig('/users/ellgil82/figures/new_ancils/orog_difs_km1p5.png', transparent = True)
	plt.savefig('/users/ellgil82/figures/new_ancils/orog_difs_km1p5.eps', transparent = True)
	plt.savefig('/users/ellgil82/figures/new_ancils/orog_difs_km1p5.pdf', transparent=True)
	plt.show()
Example #8
def spatial_foehn(calc):
	if calc == 'yes':
		dT = np.diff(surf_var['Tair'].data, n=2, axis = 0) >= 0.
		dT3 = np.diff(surf_var['Tair'].data, n=2,axis = 0) >= 3.
		Z1 = np.argmin((prof_var['altitude'][:, 110, 42].points - 2000) ** 2)
		u_Z1 = prof_var['u'][:, Z1, 110, 42].data >= 2.
		u_Z1 = np.repeat(u_Z1[2:-3], 2)
		f = np.reshape(np.tile(u_Z1, (1,1,1)), ((u_Z1.shape[0]),1,1))
		u_Z1 = np.broadcast_to(f, (dT.shape))
		RH10 = surf_var['RH'][:-2].data <= np.quantile(surf_var['RH'][:-2].data, 0.1, axis = 0)
		RH15 = surf_var['RH'][:-2].data <= np.quantile(surf_var['RH'][:-2].data, 0.15, axis = 0)
		dRH = np.diff(surf_var['RH'].data, n=2, axis = 0) <= -15
		all_cond = (((RH10 == 1.) & (dT == 1.) & (u_Z1 == 1.))| ((RH15 == 1.) & (dT3 == 1.) & (u_Z1 == 1.)) | ((dRH == 1.)  & (dT == 1.) & (u_Z1 == 1.) )) # Criteria of Turton et al. (2018) plus wind component
		total_foehn = all_cond.sum(axis = 0)
		foehn_pct = np.ma.masked_where(condition = prof_var['lsm'].data == 0., a = (total_foehn / float(len(dT))) * 100.)
	else:
		foehn_pct = iris.load_cube(filepath + 'foehn_pct.nc')
		try:
			LSM = iris.load_cube(filepath + 'new_mask.nc')
			orog = iris.load_cube(filepath + 'orog.nc')
			orog = orog[0, 0, :, :]
			lsm = LSM[0, 0, :, :]
			for i in [orog, lsm]:
				real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
		except iris.exceptions.ConstraintMismatchError:
			print('Files not found')
	# Plot
	fig, ax = plt.subplots(figsize=(8, 8))
	CbAx = fig.add_axes([0.25, 0.18, 0.5, 0.02])
	ax.axis('off')
	Larsen_mask = np.zeros((220,220))
	Larsen_mask[40:135, 90:155] = 1.
	c = ax.pcolormesh(np.ma.masked_where((Larsen_mask == 0.), foehn_pct.data), cmap = 'OrRd', vmin = 3, vmax = 12)
	#c = ax.pcolormesh(np.ma.masked_where((prof_var['orog'].data >= 100.), foehn_pct.data), cmap = 'OrRd', vmin = 3, vmax = 12)  #divide by 20 to get mean annual number of foehn/20.
	cb = plt.colorbar(c, cax = CbAx, orientation = 'horizontal', extend = 'both', ticks = [0,5,10, 15])#cb.solids.set_edgecolor("face")
	cb.outline.set_edgecolor('dimgrey')
	cb.ax.tick_params(which='both', axis='both', labelsize=24, labelcolor='dimgrey', pad=10, size=0, tick1On=False, tick2On=False)
	cb.outline.set_linewidth(2)
	cb.ax.xaxis.set_ticks_position('bottom')
	cb.set_label('Mean foehn occurrence (% of time)', fontsize = 24,  color='dimgrey', labelpad = 20)
	ax.contour(lsm.data, levels = [1], colors = '#222222')
	ax.contour(orog.data, levels = [50], colors = '#222222')
	plt.subplots_adjust( bottom = 0.25, top = 0.95)
	if host == 'bsl':
		plt.savefig('/users/ellgil82/figures/Hindcast/foehn/foehn_occurrence_spatial_composite.png', transparent=True)
		plt.savefig('/users/ellgil82/figures/Hindcast/foehn/foehn_occurrence_spatial_composite.eps', transparent=True)
	elif host == 'jasmin':
		plt.savefig('/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/figures/foehn_occurrence_spatial_composite_surface_criteria.png', transparent=True)
		plt.savefig('/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/figures/foehn_occurrence_spatial_composite_surface_criteria.eps', transparent=True)
	plt.show()
	if calc == 'yes':
		return total_foehn, foehn_pct, all_cond
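
# Hedged sketch of the detection step inside spatial_foehn(), pulled out for
# clarity (the boolean arrays and thresholds follow the Turton et al. (2018)
# criteria quoted in the code; the function name and arguments are illustrative only):
def foehn_occurrence_pct(dT, dT3, dRH, RH10, RH15, u_Z1, lsm_data):
    """Combine the three foehn criteria and express occurrence as % of timesteps."""
    all_cond = (RH10 & dT & u_Z1) | (RH15 & dT3 & u_Z1) | (dRH & dT & u_Z1)
    total_foehn = all_cond.sum(axis=0)
    return np.ma.masked_where(lsm_data == 0., 100. * total_foehn / float(all_cond.shape[0]))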
Example #9
def load_time_srs():
    cloud_frac = iris.load_cube(filepath + '1998-2017_cl_frac_daymn.nc')
    IWP = iris.load_cube(filepath + '1998-2017_total_column_ice_daymn.nc')
    LWP = iris.load_cube(filepath + '1998-2017_total_column_liquid_daymn.nc')
    WVP = iris.load_cube(filepath + '1998-2017_total_column_vapour_daymn.nc')
    lsm = iris.load_cube(filepath + 'new_mask.nc')
    orog = iris.load_cube(filepath + 'orog.nc')
    orog = orog[0, 0, :, :]
    lsm = lsm[0, 0, :, :]
    # Rotate data onto standard lat/lon grid
    for i in [cloud_frac, IWP, LWP, WVP, orog, lsm]:
        real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
    var_dict = {
        'IWP': IWP[:, 0, :, :],
        'LWP': LWP[:, 0, :, :],
        'WVP': WVP[:, 0, :, :],
        'cl_frac': cloud_frac[:, 0, :, :],
        'Time_srs': cloud_frac.coord('time').points,
        'lat': real_lat,
        'lon': real_lon,
        'orog': orog,
        'lsm': lsm
    }
    return var_dict
Example #10
def load_var(seas):
    # Load variables
    os.chdir(filepath)
    LWnet = iris.load_cube(filepath + seas + '_diurnal_surface_LW_net.nc',
                           'surface_net_downward_longwave_flux')
    SWnet = iris.load_cube(filepath + seas + '_diurnal_surface_SW_net.nc',
                           'Net short wave radiation flux')
    LWdown = iris.load_cube(filepath + seas + '_diurnal_surface_LW_down.nc',
                            'IR down')
    SWdown = iris.load_cube(filepath + seas + '_diurnal_surface_SW_down.nc',
                            'surface_downwelling_shortwave_flux_in_air')
    HL = iris.load_cube(filepath + seas + '_diurnal_latent_heat.nc',
                        'Latent heat flux')
    HS = iris.load_cube(filepath + seas + '_diurnal_sensible_heat.nc',
                        'surface_upward_sensible_heat_flux')
    melt_flux = iris.load_cube(
        filepath + seas + '_diurnal_land_snow_melt_flux.nc',
        'Snow melt heating flux')
    if host == 'bsl':
        try:
            LSM = iris.load_cube(filepath + 'new_mask.nc',
                                 'LAND MASK (No halo) (LAND=TRUE)')
            orog = iris.load_cube(filepath + 'orog.nc', 'surface_altitude')
            orog = orog[0, 0, :, :]
            LSM = LSM[0, 0, :, :]
        except iris.exceptions.ConstraintMismatchError:
            print('Files not found')
    elif host == 'jasmin':
        try:
            LSM = iris.load_cube(filepath + 'new_mask.nc', 'land_binary_mask')
            orog = iris.load_cube(filepath + 'orog.nc', 'surface_altitude')
            orog = orog[0, 0, :, :]
            LSM = LSM[0, 0, :, :]
        except iris.exceptions.ConstraintMismatchError:
            print('Files not found')
    # Create standardised time points (one numeric value per 3-hourly mean in the diurnal cycle)
    t_num = [0, 3, 6, 9, 12, 15, 18, 21]
    new_time = iris.coords.AuxCoord(t_num,
                                    long_name='time',
                                    standard_name='time',
                                    units=cf_units.Unit(
                                        'hours since 1970-01-01 00:00:00',
                                        calendar='standard'))
    T_dim = iris.coords.DimCoord(t_num,
                                 long_name='time',
                                 standard_name='time',
                                 units=cf_units.Unit(
                                     'hours since 1970-01-01 00:00:00',
                                     calendar='standard'))
    # Rotate data onto standard lat/lon grid and update times
    for i in [orog, LWnet, SWnet, HL, HS, SWdown, LWdown, melt_flux]:
        real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
    for i in [LWnet, SWnet, HL, HS, SWdown, LWdown, melt_flux]:
        try:
            i.remove_coord('time')
        except iris.exceptions.CoordinateNotFoundError:
            i.remove_coord('t')
        i.add_aux_coord(new_time, 0)
        i.attributes = {
            'north_pole': [296., 22.99],
            'name': 'solar',
            'title': 'Net short wave radiation flux',
            'CDO':
            'Climate Data Operators version 1.9.5 (http://mpimet.mpg.de/cdo)',
            'CDI':
            'Climate Data Interface version 1.9.5 (http://mpimet.mpg.de/cdi)',
            'Conventions': 'CF-1.6',
            'source': 'Unified Model Output (Vn11.1):',
            'time': '12:00',
            'date': '31/12/97'
        }
    # Calculate Etot
    Etot = iris.cube.Cube(data=LWnet.data + SWnet.data - HL.data - HS.data,
                          long_name='Total energy flux',
                          var_name='Etot',
                          units=SWnet.units)
    for n in range(3):
        Etot.add_dim_coord(SWnet.dim_coords[n], n + 1)
    Etot.add_aux_coord(SWnet.aux_coords[0], 0)
    # Flip direction of turbulent fluxes to match convention (positive towards surface)
    HS = iris.analysis.maths.multiply(HS, -1.)
    HL = iris.analysis.maths.multiply(HL, -1.)
    seas_SEB = {
        'lon': real_lon,
        'lat': real_lat,
        'seas': seas,
        'LWnet': LWnet[:, 0, :, :],
        'SWnet': SWnet[:, 0, :, :],
        'LWdown': LWdown[:, 0, :, :],
        'SWdown': SWdown[:, 0, :, :],
        'HL': HL[:, 0, :, :],
        'HS': HS[:, 0, :, :],
        'Etot': Etot[:, 0, :, :],
        'melt': melt_flux[:, 0, :, :],
        'Time_srs': t_num
    }
    return seas_SEB
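
# Hedged usage sketch (assumes `filepath`, `host`, `rotate_data` and
# matplotlib.pyplot as plt are available as in the rest of the script): plot the
# domain-mean diurnal cycle of each surface energy balance component for one season.
djf_seb = load_var('DJF')
for name in ['SWnet', 'LWnet', 'HS', 'HL', 'melt', 'Etot']:
    plt.plot(djf_seb['Time_srs'], djf_seb[name].data.mean(axis=2).mean(axis=1), label=name)
plt.xlabel('Hour (UTC)')
plt.ylabel('Flux (W m$^{-2}$)')
plt.legend()
plt.show()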
Example #11
LAM_SIC = iris.load_cube(filepath+date+'T0000Z_Peninsula_4km_test_inheritance_pa000.pp', 'sea_ice_area_fraction')
LAM_Ts = iris.load_cube(filepath+date+'T0000Z_Peninsula_4km_test_inheritance_pa000.pp', 'surface_temperature')
LAM_SIC = LAM_SIC[0,:,:]
try:
    LAM_LSM = iris.load_cube(filepath+'new_mask.nc', 'land_binary_mask')
    LAM_LSM = LAM_LSM[0, 0, :, :]
except iris.exceptions.ConstraintMismatchError:
    LAM_LSM = iris.load_cube(filepath+'new_mask.nc', 'LAND MASK (No halo) (LAND=TRUE)')
    LAM_LSM = LAM_LSM[0,0,:,:]

LAM_SST = LAM_Ts[0,:,:]

## Rotate projection
for var in [LAM_SIC, LAM_SST, LAM_LSM]:
    real_lon, real_lat = rotate_data(var, 0, 1)

# Mask surface temperature data over land
LAM_SST.data = np.ma.masked_where(LAM_LSM.data == 1, LAM_SST.data )
LAM_SST.convert_units('celsius')

# Compare re-gridding methods
# Remove coord_system from glm data before regridding
#glm_SST.coord(axis='y').coord_system = None
#glm_SST.coord(axis='x').coord_system = None
#glm_SIC.coord(axis='x').coord_system = None
#glm_SIC.coord(axis='y').coord_system = None

# Up- and down-sample data, respectively
#upsamp_glm_SST = glm_SST.regrid(LAM_SST, iris.analysis.Linear())
#downsamp_LAM_SST = LAM_SST.regrid(glm_SST, iris.analysis.Linear())
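
# Hedged sketch of how the regridding comparison above might be completed
# (glm_SST is assumed to be a global-model surface_temperature cube loaded
# elsewhere; the calls mirror the commented lines and use the same
# iris.analysis.Linear() scheme):
# upsamp_glm_SST = glm_SST.regrid(LAM_SST, iris.analysis.Linear())
# downsamp_LAM_SST = LAM_SST.regrid(glm_SST, iris.analysis.Linear())
# print(np.ma.mean(upsamp_glm_SST.data - LAM_SST.data))  # mean SST difference on the LAM grid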
Example #12
def load_vars(year):
    # Set up filepath
    if host == 'jasmin':
        filepath = '/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/output/alloutput/'
        ancil_path = '/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/output/'
        lsm_name = 'land_binary_mask'
    elif host == 'bsl':
        filepath = '/data/mac/ellgil82/hindcast/output/'
        ancil_path = filepath
        lsm_name = 'LAND MASK (No halo) (LAND=TRUE)'
    try:
        melt_flux = iris.load_cube(filepath + year + '_land_snow_melt_flux.nc',
                                   'Snow melt heating flux')  # W m-2
        melt_amnt = iris.load_cube(filepath + year + '_land_snow_melt_amnt.nc',
                                   'Snowmelt')  # kg m-2
        melt_amnt = iris.analysis.maths.multiply(melt_amnt, 108.)
        melt_rate = iris.load_cube(filepath + year + '_Ts.nc',
                                   'surface_temperature')  # NB: surface temperature is loaded under the name melt_rate; the melt-rate load itself is commented out below
        SW_down = iris.load_cube(filepath + year + '_surface_SW_down.nc',
                                 'surface_downwelling_shortwave_flux_in_air')
        LW_down = iris.load_cube(filepath + year + '_surface_LW_down.nc',
                                 'IR down')
        cloud_cover = iris.load_cube(filepath + year + '_cl_frac.nc',
                                     'Total cloud')
        IWP = iris.load_cube(filepath + year + '_total_column_ice.nc',
                             'atmosphere_cloud_ice_content')
        LWP = iris.load_cube(filepath + year + '_total_column_liquid.nc',
                             'atmosphere_cloud_liquid_water_content')
        WVP = iris.load_cube(filepath + year + '_total_column_vapour.nc')
        #melt_rate = iris.load_cube(filepath + year + '_land_snow_melt_rate.nc', 'Rate of snow melt on land')  # kg m-2 s-1
        foehn_idx = iris.load_cube(filepath + 'FI_noFF_calc_grad.nc')
        orog = iris.load_cube(ancil_path + 'orog.nc')
        orog = orog[0, 0, :, :]
        LSM = iris.load_cube(ancil_path + 'new_mask.nc')
        LSM = LSM[0, 0, :, :]
    except iris.exceptions.ConstraintMismatchError:
        print('Files not found')
    var_list = [
        melt_rate, melt_amnt, melt_flux, SW_down, cloud_cover, IWP, LWP, WVP,
        LW_down
    ]
    for i in var_list:
        real_lon, real_lat = rotate_data(i, 2, 3)
    vars_yr = {
        'melt_flux': melt_flux[:, 0, :, :],
        'melt_rate': melt_rate[:, 0, :, :],
        'melt_amnt': melt_amnt[:, 0, :, :],
        'SW_down': SW_down[:, 0, :, :],
        'LW_down': LW_down[:, 0, :, :],
        'cl_cover': cloud_cover[:, 0, :, :],
        'IWP': IWP[:, 0, :, :],
        'LWP': LWP[:, 0, :, :],
        'WVP': WVP[:, 0, :, :],
        'foehn_idx': foehn_idx,
        'orog': orog,
        'lsm': LSM,
        'lon': real_lon,
        'lat': real_lat,
        'year': year
    }
    return vars_yr
Example #13
            p[x, y] = p_value
            err[x, y] = std_err
    r2 = r**2
    return r, r2, p, err, x_masked, y_masked


r, r2, p, err, x_masked, y_masked = foehn_melt(surf['melt_amnt'][4:58440],
                                               surf['foehn_idx'])
np.savetxt(filepath + 'foehn_index_melt_correlation_r.csv', r, delimiter=',')
np.savetxt(filepath + 'foehn_index_melt_correlation_r2.csv', r2, delimiter=',')
np.savetxt(filepath + 'foehn_index_melt_correlation_p.csv', p, delimiter=',')
np.savetxt(filepath + 'foehn_index_melt_correlation_err.csv',
           err,
           delimiter=',')

real_lon, real_lat = rotate_data(surf['lsm'], 0, 1)
real_lon, real_lat = rotate_data(surf['orog'], 0, 1)

r, r2, p, err, x_masked, y_masked = foehn_melt(SON_melt.data, SON_FI.data)
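
# Hedged reconstruction of the kind of gridcell-by-gridcell regression that
# foehn_melt() appears to perform above (foehn_melt itself is defined elsewhere;
# scipy.stats.linregress and this helper name are illustrative only):
from scipy import stats

def gridcell_regression(melt, foehn_idx):
    """Regress melt on foehn index independently at every cell of (time, y, x) arrays."""
    ny, nx = melt.shape[1:]
    r, p, err = np.zeros((ny, nx)), np.zeros((ny, nx)), np.zeros((ny, nx))
    for x in range(ny):
        for y in range(nx):
            _, _, r[x, y], p[x, y], err[x, y] = stats.linregress(foehn_idx[:, x, y], melt[:, x, y])
    return r, r ** 2, p, err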

correlation_maps(['1998-2017'], )


def plot_foehn_index(subplot):
    Larsen_box = np.zeros((220, 220))
    Larsen_box[40:135, 90:155] = 1.
    if subplot == False or subplot == 'no':
        fig = plt.figure(frameon=False, figsize=(
            8, 8
        ))  # !!change figure dimensions when you have a larger model domain
        fig.patch.set_visible(False)
Example #14
File: EOFs.py Project: ellgil82/Hindcast
if host == 'jasmin':
    filepath = '/gws/nopw/j04/bas_climate/users/ellgil82/hindcast/output/alloutput/'
elif host == 'bsl':
    filepath = '/data/mac/ellgil82/hindcast/output/'

MSLP = iris.load_cube(filepath + '1998-2017_MSLP.nc')

try:
    LSM = iris.load_cube(filepath+'new_mask.nc')
    orog = iris.load_cube(filepath+'orog.nc')
    orog = orog[0,0,:,:]
    lsm = LSM[0,0,:,:]
except iris.exceptions.ConstraintMismatchError:
    print('Files not found')

for i in [lsm, orog]:
    real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)

def rmv_mn(input):
    mn = input.collapsed('time', iris.analysis.MEAN)
    anom = input - mn
    return mn, anom

melt_mn = iris.load_cube(filepath + '1998-2017_land_snow_melt_amnt_daymn.nc')
cl_mn = iris.load_cube(filepath + '1998-2017_cl_frac_daymn.nc')
MSLP_daymn = iris.load_cube(filepath + '1998-2017_MSLP_daymn.nc')
#Tair_daymn = iris.load_cube(filepath + '1998-2017_Tair_daymn.nc')
FF_daymn = iris.load_cube(filepath + '1998-2017_FF_10m_daymn.nc')
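
# Hedged usage sketch (illustrative only): removing the 1998-2017 time mean from
# the daily-mean cubes loaded above gives the anomaly fields that an EOF analysis
# would typically operate on.
melt_time_mean, melt_anom = rmv_mn(melt_mn)
MSLP_time_mean, MSLP_anom = rmv_mn(MSLP_daymn)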



for i in [cl_mn, melt_mn, MSLP_daymn]:
Example #15
DJF, MAM, JJA, SON = find_seasmean_values('1998-2017_seasmean_cl_frac.nc')

lims = {
    'cloud_fraction': (0.6, 0.95, 'Seasonal mean cloud fraction'),
    'IWP': (0.05, 0.25, 'Ice water path (g kg$^{-1}$)'),
    'LWP': (0.01, 0.15, 'Liquid water path (g kg$^{-1}$)'),
    'WVP': (2.5, 10.0, 'Water vapour path (g kg$^{-1}$)')
}

lsm = iris.load_cube(filepath + 'new_mask.nc')
orog = iris.load_cube(filepath + 'orog.nc')
orog = orog[0, 0, :, :]
lsm = lsm[0, 0, :, :]

for i in [lsm, orog]:
    real_lon, real_lat = rotate_data(i, 0, 1)

all_vars = {'lsm': lsm, 'orog': orog, 'lat': real_lat, 'lon': real_lon}


def plot_seas_cl_maps(var_name):
    fig, ax = plt.subplots(2, 2, figsize=(8, 10))
    CbAx = fig.add_axes([0.25, 0.15, 0.5, 0.02])
    ax = ax.flatten()
    for axs in ax:
        axs.axis('off')
    plot = 0
    for i, j in zip([DJF, MAM, JJA, SON], ['DJF', 'MAM', 'JJA', 'SON']):
        c = ax[plot].pcolormesh(np.mean(i.data, axis=0),
                                vmin=lims[var_name][0],
                                vmax=lims[var_name][1])
Example #16
def load_vars(year):
    Tair = iris.load_cube(filepath + year + '_Tair_1p5m_daymn.nc',
                          'air_temperature')
    Ts = iris.load_cube(filepath + year + '_Ts_daymn.nc',
                        'surface_temperature')
    MSLP = iris.load_cube(filepath + year + '_MSLP_daymn.nc',
                          'air_pressure_at_sea_level')
    sfc_P = iris.load_cube(filepath + year + '_sfc_P_daymn.nc',
                           'surface_air_pressure')
    FF_10m = iris.load_cube(filepath + year + '_FF_10m_daymn.nc', 'wind_speed')
    RH = iris.load_cube(filepath + year + '_RH_1p5m_daymn.nc',
                        'relative_humidity')
    u = iris.load_cube(filepath + year + '_u_10m_daymn.nc',
                       'x wind component (with respect to grid)')
    v = iris.load_cube(filepath + year + '_v_10m_daymn.nc',
                       'y wind component (with respect to grid)')
    LWnet = iris.load_cube(filepath + year + '_surface_LW_net_daymn.nc',
                           'surface_net_downward_longwave_flux')
    SWnet = iris.load_cube(filepath + year + '_surface_SW_net_daymn.nc',
                           'Net short wave radiation flux')
    LWdown = iris.load_cube(filepath + year + '_surface_LW_down_daymn.nc',
                            'IR down')
    SWdown = iris.load_cube(filepath + year + '_surface_SW_down_daymn.nc',
                            'surface_downwelling_shortwave_flux_in_air')
    SWup = iris.load_cube(filepath + year + '_surface_SW_up_daymn.nc')
    LWup = iris.load_cube(filepath + year + '_surface_LW_up_daymn.nc')
    HL = iris.load_cube(filepath + year + '_latent_heat_daymn.nc',
                        'Latent heat flux')
    HS = iris.load_cube(filepath + year + '_sensible_heat_daymn.nc',
                        'surface_upward_sensible_heat_flux')
    melt = iris.load_cube(filepath + year + '_land_snow_melt_flux_daymn.nc',
                          'Snow melt heating flux')
    Tair.convert_units('celsius')
    Ts.convert_units('celsius')
    MSLP.convert_units('hPa')
    sfc_P.convert_units('hPa')
    FF_10m = FF_10m[:, :, 1:, :]
    v = v[:, :, 1:, :]
    var_list = [
        Tair, Ts, MSLP, sfc_P, FF_10m, RH, u, v, LWnet, SWnet, LWdown, SWdown,
        SWup, LWup, HL, HS, melt
    ]
    for i in var_list:
        real_lon, real_lat = rotate_data(i, 2, 3)
        tcoord = i.coord('time')
        tcoord.units = cf_units.Unit(
            tcoord.units.origin,
            calendar='gregorian')  # change to be compatible
        i.coord('time').convert_units('seconds since  1970-01-01 00:00:00')
    Time_srs = matplotlib.dates.num2date(
        matplotlib.dates.epoch2num(i.coord('time').points))
    WD = metpy.calc.wind_direction(u=u.data, v=v.data)
    WD = iris.cube.Cube(data=WD, standard_name='wind_from_direction')
    Etot = LWnet.data + SWnet.data - HL.data - HS.data
    Emelt_calc = np.copy(Etot)
    Emelt_calc[Ts.data < -0.025] = 0
    for turb in [HS, HL]:
        turb.data = 0 - turb.data
    Emelt_calc = iris.cube.Cube(Emelt_calc)
    Etot = iris.cube.Cube(Etot)
    vars_yr = {
        'Tair': Tair[:, 0, :, :],
        'Ts': Ts[:, 0, :, :],
        'MSLP': MSLP[:, 0, :, :],
        'sfc_P': sfc_P[:, 0, :, :],
        'FF_10m': FF_10m[:, 0, :, :],
        'RH': RH[:, 0, :, :],
        'WD': WD[:, 0, :, :],
        'LWnet': LWnet[:, 0, :, :],
        'SWnet': SWnet[:, 0, :, :],
        'SWdown': SWdown[:, 0, :, :],
        'SWup': SWup[:, 0, :, :],
        'LWdown': LWdown[:, 0, :, :],
        'HL': HL[:, 0, :, :],
        'HS': HS[:, 0, :, :],
        'Etot': Etot[:, 0, :, :],
        'Emelt': melt[:, 0, :, :],
        'LWup': LWup[:, 0, :, :],
        'lon': real_lon,
        'lat': real_lat,
        'year': year,
        'Emelt_calc': Emelt_calc[:, 0, :, :],
        'Time_srs': Time_srs
    }
    return vars_yr
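
# Hedged usage sketch (assumes `filepath` and `rotate_data` are defined as in the
# rest of the script): compare the model's melt flux with the residual estimate,
# which zeroes the available melt energy wherever the surface is below the
# melting point (the Ts < -0.025 degC test above).
v98 = load_vars('1998')
diff = v98['Emelt_calc'].data - v98['Emelt'].data
print('Mean difference between estimated and modelled melt energy (W m-2):', np.mean(diff))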