# NB: these routines assume the star imports used elsewhere in this toolset
# (netCDF4 Dataset, numpy, numpy.ma, matplotlib.pyplot) plus the local helpers
# monthly_avg, monthly_avg_roms, monthly_avg_cice, make_patches, fesom_grid,
# unrotate_grid, unrotate_vector, interp_fesom2common and truncate_colormap.

def mip_last_jan(roms_in_file, roms_out_file, fesom_in_file, fesom_out_file):

    id = Dataset(roms_in_file, 'r')
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    num_depth = id.variables['temp'].shape[1]
    id.close()
    num_lat = size(lat, 0)
    num_lon = size(lon, 1)

    print 'Calculating monthly averages for ROMS'
    roms_temp = monthly_avg_roms(roms_in_file, 'temp', [num_depth, num_lat, num_lon], 0)
    roms_salt = monthly_avg_roms(roms_in_file, 'salt', [num_depth, num_lat, num_lon], 0)

    print 'Writing ' + roms_out_file
    id = Dataset(roms_out_file, 'w')
    id.createDimension('xi_rho', size(lon, 1))
    id.createDimension('eta_rho', size(lon, 0))
    id.createDimension('s_rho', num_depth)
    id.createDimension('time', None)
    id.createVariable('lon_rho', 'f8', ('eta_rho', 'xi_rho'))
    id.variables['lon_rho'].long_name = 'longitude of rho-points'
    id.variables['lon_rho'].units = 'degree_east'
    id.variables['lon_rho'][:, :] = lon
    id.createVariable('lat_rho', 'f8', ('eta_rho', 'xi_rho'))
    id.variables['lat_rho'].long_name = 'latitude of rho-points'
    id.variables['lat_rho'].units = 'degree_north'
    id.variables['lat_rho'][:, :] = lat
    id.createVariable('time', 'f8', ('time',))
    id.variables['time'].units = 'month'
    id.variables['time'].description = 'January'
    id.variables['time'][0] = 1
    id.createVariable('temp', 'f8', ('time', 's_rho', 'eta_rho', 'xi_rho'))
    id.variables['temp'].units = 'degC'
    id.variables['temp'][0, :, :, :] = roms_temp
    id.createVariable('salt', 'f8', ('time', 's_rho', 'eta_rho', 'xi_rho'))
    id.variables['salt'].units = 'psu'
    id.variables['salt'][0, :, :, :] = roms_salt
    id.close()

    print 'Calculating monthly averages for FESOM'
    fesom_temp = monthly_avg(fesom_in_file, 'temp', 0)
    fesom_salt = monthly_avg(fesom_in_file, 'salt', 0)

    print 'Writing ' + fesom_out_file
    id = Dataset(fesom_out_file, 'w')
    id.createDimension('nodes_3d', size(fesom_temp))
    id.createDimension('T', None)
    id.createVariable('temp', 'f8', ('T', 'nodes_3d'))
    id.variables['temp'].description = 'mean potential temperature'
    id.variables['temp'].units = 'degC'
    id.variables['temp'][0, :] = fesom_temp
    id.createVariable('salt', 'f8', ('T', 'nodes_3d'))
    id.variables['salt'].description = 'mean salinity'
    id.variables['salt'].units = 'psu'
    id.variables['salt'][0, :] = fesom_salt
    id.close()

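# A minimal sketch (not from the original script) of how mip_last_jan might be
# driven; every file name below is a placeholder for the real ROMS and FESOM
# annual-average files and the desired output files.
def example_mip_last_jan_usage():
    roms_in = 'ocean_avg_lastyear.nc'        # hypothetical ROMS annual file
    roms_out = 'roms_last_jan.nc'            # hypothetical output file
    fesom_in = 'oce.mean.lastyear.nc'        # hypothetical FESOM annual file
    fesom_out = 'fesom_last_jan.nc'          # hypothetical output file
    mip_last_jan(roms_in, roms_out, fesom_in, fesom_out)
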
def rcp_seaice_extent_change():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/', '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/', '/short/y99/kaa561/FESOM/highres_spinup/']
    file_beg = 'avg.ice.mean.1996.2005.nc'
    file_end = 'avg.ice.mean.2091.2100.nc'
    # Titles
    expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL']
    num_expts = len(directories)
    # Mesh parameters
    circumpolar = True
    cross_180 = False

    print 'Building mesh'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)
    num_elm = len(elements)

    print 'Reading data'
    print '...1996-2005'
    # Calculate monthly averages for September
    aice_nodes_beg = monthly_avg(directory_beg + file_beg, 'area', 8)
    n2d = size(aice_nodes_beg)
    aice_nodes_end = empty([num_expts, n2d])
    for expt in range(num_expts):
        print '...' + expt_names[expt]
        aice_nodes_end[expt,:] = monthly_avg(directories[expt] + file_end, 'area', 8)

    print 'Calculating element-averages'
    aice_beg = empty(num_elm)
    aice_end = empty([num_expts, num_elm])
    # Also save area of each element
    area_elm = empty(num_elm)
    for i in range(num_elm):
        elm = elements[i]
        area_elm[i] = elm.area()
        aice_beg[i] = (aice_nodes_beg[elm.nodes[0].id] + aice_nodes_beg[elm.nodes[1].id] + aice_nodes_beg[elm.nodes[2].id])/3.0
        for expt in range(num_expts):
            aice_end[expt,i] = (aice_nodes_end[expt,elm.nodes[0].id] + aice_nodes_end[expt,elm.nodes[1].id] + aice_nodes_end[expt,elm.nodes[2].id])/3.0

    print 'Sea ice extent:'
    # 1996-2005
    # Select elements with concentration above 15%
    flag_beg = aice_beg > 0.15
    # Integrate the area of these elements and convert to million km^2
    extent_beg = sum(flag_beg*area_elm)*1e-12
    print '1996-2005: ' + str(extent_beg) + ' million km^2'
    # 2091-2100
    flag_end = aice_end > 0.15
    for expt in range(num_expts):
        extent_end = sum(flag_end[expt,:]*area_elm)*1e-12
        percent_change = (extent_end - extent_beg)/extent_beg*100
        print expt_names[expt] + ': ' + str(extent_end) + ' million km^2; change of ' + str(percent_change) + '%'

def aice_minmax_nsidc():

    # Paths to FESOM mesh and output files
    directory_head = '/short/y99/kaa561/FESOM/'
    mesh_low = directory_head + 'mesh/low_res/'
    mesh_high = directory_head + 'mesh/high_res/'
    expt_dir = ['lowres_spinup/rep3/', 'highres_spinup/rep3/']
    fesom_file = 'avg.ice.mean.nc'  # 1992-2005 climatology for rep3
    # Paths to reconstruct NSIDC files for each month
    nsidc_head1 = '/short/m68/kaa561/nsidc_aice/seaice_conc_monthly_sh_f11_'
    nsidc_head2 = '/short/m68/kaa561/nsidc_aice/seaice_conc_monthly_sh_f13_'
    nsidc_tail = '_v02r00.nc'
    start_year = 1992
    end_year = 2005
    num_years = end_year - start_year + 1
    # Plotting parameters
    circumpolar = True
    mask_cavities = True
    deg2rad = pi / 180.0

    # Make plotting patches for low-res FESOM
    elements_low, patches_low = make_patches(mesh_low, circumpolar, mask_cavities)
    # Read monthly averages for February and August of sea ice concentration
    # at each node
    file_path = directory_head + expt_dir[0] + fesom_file
    feb_lowres = monthly_avg(file_path, 'area', 1)
    aug_lowres = monthly_avg(file_path, 'area', 7)
    # Get values for each element
    feb_lowres_values = []
    aug_lowres_values = []
    for elm in elements_low:
        # Mask ice shelf cavities
        if not elm.cavity:
            # Average over 3 nodes making up this element
            feb_lowres_values.append(mean([feb_lowres[elm.nodes[0].id], feb_lowres[elm.nodes[1].id], feb_lowres[elm.nodes[2].id]]))
            aug_lowres_values.append(mean([aug_lowres[elm.nodes[0].id], aug_lowres[elm.nodes[1].id], aug_lowres[elm.nodes[2].id]]))

    # Repeat for high-res FESOM
    elements_high, patches_high = make_patches(mesh_high, circumpolar, mask_cavities)
    file_path = directory_head + expt_dir[1] + fesom_file
    feb_highres = monthly_avg(file_path, 'area', 1)
    aug_highres = monthly_avg(file_path, 'area', 7)
    feb_highres_values = []
    aug_highres_values = []
    for elm in elements_high:
        if not elm.cavity:
            feb_highres_values.append(mean([feb_highres[elm.nodes[0].id], feb_highres[elm.nodes[1].id], feb_highres[elm.nodes[2].id]]))
            aug_highres_values.append(mean([aug_highres[elm.nodes[0].id], aug_highres[elm.nodes[1].id], aug_highres[elm.nodes[2].id]]))

    # Read NSIDC grid
    id = Dataset(nsidc_head1 + '199201' + nsidc_tail, 'r')
    nsidc_lon = id.variables['longitude'][:, :]
    nsidc_lat = id.variables['latitude'][:, :]
    id.close()

    # Read February and August NSIDC sea ice concentration for each year
    feb_nsidc = ma.empty([num_years, size(nsidc_lon, 0), size(nsidc_lat, 1)])
    aug_nsidc = ma.empty([num_years, size(nsidc_lon, 0), size(nsidc_lat, 1)])
    # Loop over years
    for year in range(start_year, end_year + 1):
        # Reconstruct file paths
        if year < 1996:
            feb_file = nsidc_head1 + str(year) + '02' + nsidc_tail
            aug_file = nsidc_head1 + str(year) + '08' + nsidc_tail
        else:
            feb_file = nsidc_head2 + str(year) + '02' + nsidc_tail
            aug_file = nsidc_head2 + str(year) + '08' + nsidc_tail
        # Read February data and mask
        id = Dataset(feb_file, 'r')
        feb_data_tmp = id.variables['seaice_conc_monthly_cdr'][0, :, :]
        # (This variable is masked but sea ice concentration isn't)
        nsidc_mask = id.variables['stdev_of_seaice_conc_monthly_cdr'][0, :, :]
        id.close()
        # Apply mask
        feb_data = ma.empty(shape(feb_data_tmp))
        feb_data[:, :] = 0.0
        feb_data[~nsidc_mask.mask] = feb_data_tmp[~nsidc_mask.mask]
        feb_data[nsidc_mask.mask] = ma.masked
        # Save result
        feb_nsidc[year - start_year, :, :] = feb_data[:, :]
        # Repeat for August
        id = Dataset(aug_file, 'r')
        aug_data_tmp = id.variables['seaice_conc_monthly_cdr'][0, :, :]
        nsidc_mask = id.variables['stdev_of_seaice_conc_monthly_cdr'][0, :, :]
        id.close()
        aug_data = ma.empty(shape(aug_data_tmp))
        aug_data[:, :] = 0.0
        aug_data[~nsidc_mask.mask] = aug_data_tmp[~nsidc_mask.mask]
        aug_data[nsidc_mask.mask] = ma.masked
        aug_nsidc[year - start_year, :, :] = aug_data[:, :]
    # Average over years
    feb_nsidc = mean(feb_nsidc, axis=0)
    aug_nsidc = mean(aug_nsidc, axis=0)
    # Make sure mask is still there
    feb_nsidc[nsidc_mask.mask] = ma.masked
    aug_nsidc[nsidc_mask.mask] = ma.masked

    # Polar coordinates for plotting
    nsidc_x = -(nsidc_lat + 90) * cos(nsidc_lon * deg2rad + pi / 2)
    nsidc_y = (nsidc_lat + 90) * sin(nsidc_lon * deg2rad + pi / 2)
    # Choose boundaries based on extent of NSIDC grid
    bdry1 = amax(nsidc_x[:, 0])
    bdry2 = amin(nsidc_x[:, -1])
    bdry3 = amin(nsidc_y[:, 0])
    bdry4 = amax(nsidc_y[:, -1])

    # Plot
    fig = figure(figsize=(16, 10))
    # Low-res FESOM, February
    ax = fig.add_subplot(2, 3, 1, aspect='equal')
    img = PatchCollection(patches_low, cmap=jet)
    img.set_array(array(feb_lowres_values))
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    title('Low res', fontsize=24)
    text(-39, 0, 'February', fontsize=24, ha='right')
    # High-res FESOM, February
    ax = fig.add_subplot(2, 3, 2, aspect='equal')
    img = PatchCollection(patches_high, cmap=jet)
    img.set_array(array(feb_highres_values))
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    title('High res', fontsize=24)
    # NSIDC, February
    ax = fig.add_subplot(2, 3, 3, aspect='equal')
    img = pcolor(nsidc_x, nsidc_y, feb_nsidc, vmin=0, vmax=1, cmap=jet)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    title('NSIDC', fontsize=24)
    # Low-res FESOM, August
    ax = fig.add_subplot(2, 3, 4, aspect='equal')
    img = PatchCollection(patches_low, cmap=jet)
    img.set_array(array(aug_lowres_values))
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    text(-39, 0, 'August', fontsize=24, ha='right')
    # High-res FESOM, August
    ax = fig.add_subplot(2, 3, 5, aspect='equal')
    img = PatchCollection(patches_high, cmap=jet)
    img.set_array(array(aug_highres_values))
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    # NSIDC, August
    ax = fig.add_subplot(2, 3, 6, aspect='equal')
    img = pcolor(nsidc_x, nsidc_y, aug_nsidc, vmin=0, vmax=1, cmap=jet)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    # Add a colourbar at the bottom
    cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.04])
    cbar = colorbar(img, orientation='horizontal', ticks=arange(0, 1 + 0.25, 0.25), cax=cbaxes)
    cbar.ax.tick_params(labelsize=16)
    # Main title
    suptitle('Sea ice concentration (1992-2005)', fontsize=30)
    # Make panels closer together
    subplots_adjust(wspace=0.05, hspace=0.05)
    #fig.show()
    fig.savefig('aice_minmax.png')

def rcp_seaice():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/', '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/', '/short/y99/kaa561/FESOM/highres_spinup/']
    file_beg = 'avg.ice.mean.1996.2005.nc'
    file_end = 'avg.ice.mean.2091.2100.nc'
    # Titles for plotting
    expt_names = ['RCP 4.5 MMM', 'RCP 4.5 ACCESS', 'RCP 8.5 MMM', 'RCP 8.5 ACCESS', 'CONTROL']
    num_expts = len(directories)
    # FESOM plotting parameters
    circumpolar = True
    mask_cavities = True
    # Boundaries on plot (under polar coordinate transformation)
    x_min = -36.25
    x_max = 36.25
    y_min = -34.5
    y_max = 38
    # Locations to plot each experiment
    j_plot = [0, 0, 1, 1, 1]
    i_plot = [1, 2, 1, 2, 0]

    print 'Building mesh'
    elements, patches = make_patches(mesh_path, circumpolar, mask_cavities)
    num_ocn_elm = len(patches)

    print 'Reading data'
    print '...1996-2005'
    aice_nodes_beg = monthly_avg(directory_beg + file_beg, 'area', 8)
    n2d = size(aice_nodes_beg)
    # Anomalies for the rest of the experiments
    aice_nodes_diff = empty([num_expts, n2d])
    for expt in range(num_expts):
        print '...' + expt_names[expt]
        aice_nodes_diff[expt, :] = monthly_avg(directories[expt] + file_end, 'area', 8) - aice_nodes_beg

    print 'Calculating element-averages'
    aice_beg = empty(num_ocn_elm)
    aice_diff = empty([num_expts, num_ocn_elm])
    i = 0
    for elm in elements:
        if not elm.cavity:
            aice_beg[i] = mean(array([aice_nodes_beg[elm.nodes[0].id], aice_nodes_beg[elm.nodes[1].id], aice_nodes_beg[elm.nodes[2].id]]))
            for expt in range(num_expts):
                aice_diff[expt, i] = mean(array([aice_nodes_diff[expt, elm.nodes[0].id], aice_nodes_diff[expt, elm.nodes[1].id], aice_nodes_diff[expt, elm.nodes[2].id]]))
            i += 1

    # Truncate difference colourmap
    min_colour = 0
    max_colour = (amax(aice_diff) + 1) / 2.0
    diff_cmap = truncate_colormap(get_cmap('RdBu_r'), min_colour, max_colour)

    print 'Plotting'
    fig = figure(figsize=(10, 8))
    gs = GridSpec(2, 3)
    gs.update(left=0.1, right=0.9, bottom=0.1, top=0.85, wspace=0.01, hspace=0.15)
    # 1996-2005
    ax = subplot(gs[0, 0], aspect='equal')
    img = PatchCollection(patches, cmap='jet')
    img.set_array(aice_beg)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([x_min, x_max])
    ylim([y_min, y_max])
    ax.set_xticks([])
    ax.set_yticks([])
    title('1996-2005', fontsize=20)
    # Colourbar on the left
    cbaxes = fig.add_axes([0.02, 0.58, 0.02, 0.2])
    cbar = colorbar(img, cax=cbaxes, ticks=arange(0, 1 + 0.25, 0.25))
    cbar.ax.tick_params(labelsize=14)
    # Loop over the rest of the experiments
    for expt in range(num_expts):
        ax = subplot(gs[j_plot[expt], i_plot[expt]], aspect='equal')
        img = PatchCollection(patches, cmap=diff_cmap)
        img.set_array(aice_diff[expt, :])
        img.set_clim(vmin=-1, vmax=amax(aice_diff))
        img.set_edgecolor('face')
        ax.add_collection(img)
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        ax.set_xticks([])
        ax.set_yticks([])
        title(expt_names[expt], fontsize=20)
        if expt == 1:
            # Colourbar on the right
            cbaxes = fig.add_axes([0.92, 0.58, 0.02, 0.2])
            cbar = colorbar(img, cax=cbaxes, ticks=arange(-1, 1 + 0.5, 0.5))
            cbar.ax.tick_params(labelsize=14)
        if expt == num_expts - 1:
            # Text to indicate anomalies
            text(x_min, y_min - 3, 'anomalies (2091-2100 minus 1996-2005)', ha='left', va='top', fontsize=18)
    # Main title
    suptitle('September sea ice concentration', fontsize=28)
    fig.show()
    fig.savefig('rcp_seaice.png')

def nsidc_aice_monthly(elements, patches, file_path, month, save=False, fig_name=None):

    # Month names for plot titles
    month_name = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
    # NSIDC file paths
    nsidc_head = '/short/m68/kaa561/nsidc_aice/seaice_conc_monthly_sh'
    nsidc_head_0 = nsidc_head + '_f11_'
    nsidc_head_1 = nsidc_head + '_f13_'
    nsidc_tail = '_v02r00.nc'
    # Degrees to radians conversion factor
    deg2rad = pi/180.0

    # Get monthly average of the FESOM output
    fesom_data = monthly_avg(file_path, 'area', month)
    # Build an array of FESOM data values corresponding to each Element
    values = []
    for elm in elements:
        # For each element not in an ice shelf cavity, append the mean value
        # for the 3 component Nodes
        if not elm.cavity:
            values.append(mean([fesom_data[elm.nodes[0].id], fesom_data[elm.nodes[1].id], fesom_data[elm.nodes[2].id]]))

    # Construct NSIDC file path (the prefix changes partway through 1995,
    # presumably because the source satellite switches from F11 to F13)
    if month+1 < 10:
        nsidc_file = nsidc_head_0 + '19950' + str(month+1) + nsidc_tail
    else:
        nsidc_file = nsidc_head_1 + '1995' + str(month+1) + nsidc_tail
    # Read NSIDC grid and monthly data
    id = Dataset(nsidc_file, 'r')
    nsidc_lon = id.variables['longitude'][:,:]
    nsidc_lat = id.variables['latitude'][:,:]
    nsidc_data_tmp = id.variables['seaice_conc_monthly_cdr'][0,:,:]
    # Read std just for the land mask
    nsidc_mask = id.variables['stdev_of_seaice_conc_monthly_cdr'][0,:,:]
    id.close()
    # Set land mask on NSIDC sea ice concentration
    nsidc_data = ma.empty(shape(nsidc_data_tmp))
    nsidc_data[:,:] = 0.0
    nsidc_data[~nsidc_mask.mask] = nsidc_data_tmp[~nsidc_mask.mask]
    nsidc_data[nsidc_mask.mask] = ma.masked
    # Convert to polar coordinates for plotting
    nsidc_x = -(nsidc_lat+90)*cos(nsidc_lon*deg2rad+pi/2)
    nsidc_y = (nsidc_lat+90)*sin(nsidc_lon*deg2rad+pi/2)
    # Find boundaries for each side of plot based on extent of NSIDC grid
    bdry1 = amax(nsidc_x[:,0])
    bdry2 = amin(nsidc_x[:,-1])
    bdry3 = amin(nsidc_y[:,0])
    bdry4 = amax(nsidc_y[:,-1])
    # Set consistent colour levels
    lev = linspace(0, 1, num=50)

    # Plot
    fig = figure(figsize=(20,9))
    # NSIDC
    ax = fig.add_subplot(1,2,1, aspect='equal')
    contourf(nsidc_x, nsidc_y, nsidc_data, lev)
    title('NSIDC', fontsize=24)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    # FESOM
    ax = fig.add_subplot(1,2,2, aspect='equal')
    img = PatchCollection(patches, cmap=jet)
    img.set_array(array(values))
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    axis('off')
    title('FESOM', fontsize=24)
    # Add a horizontal colorbar at the bottom
    cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.04])
    cbar = colorbar(img, orientation='horizontal', ticks=arange(0,1+0.25,0.25), cax=cbaxes)
    cbar.ax.tick_params(labelsize=20)
    # Add the main title
    suptitle(month_name[month] + ' sea ice concentration', fontsize=30)
    # Finished
    if save:
        fig.savefig(fig_name)
    else:
        fig.show()

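# A minimal sketch (not from the original script) of driving nsidc_aice_monthly
# for a September comparison; the mesh path and FESOM output file are
# placeholders, and make_patches is the same helper used throughout this module.
def example_nsidc_aice_monthly_usage():
    mesh_path = '/path/to/fesom/mesh/'       # hypothetical mesh location
    elements, patches = make_patches(mesh_path, True, True)
    nsidc_aice_monthly(elements, patches, 'avg.ice.mean.nc', 8, save=True, fig_name='aice_sep_vs_nsidc.png')
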
def rcp_aice_minmax():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/high_res/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M_highres/output/', '/short/y99/kaa561/FESOM/rcp45_A_highres/output/', '/short/y99/kaa561/FESOM/rcp85_M_highres/output/', '/short/y99/kaa561/FESOM/rcp85_A_highres/output/', '/short/y99/kaa561/FESOM/highres_spinup/']
    file_beg = 'avg.ice.mean.1996.2005.nc'
    file_end = 'avg.ice.mean.2091.2100.nc'
    # Titles for plotting
    expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL']
    num_expts = len(directories)
    # Start and end years for each period
    beg_years = [1996, 2005]
    end_years = [2091, 2100]
    # FESOM plotting parameters
    circumpolar = True
    mask_cavities = True
    # Boundaries on plot (under polar coordinate transformation)
    x_min = -36.25
    x_max = 36.25
    y_min = -34.5
    y_max = 38

    print 'Building mesh'
    elements, patches = make_patches(mesh_path, circumpolar, mask_cavities)

    print 'Reading data'
    # Get averages for February and September
    print '...1996-2005'
    fesom_feb_nodes_beg = monthly_avg(directory_beg + file_beg, 'area', 1)
    fesom_sep_nodes_beg = monthly_avg(directory_beg + file_beg, 'area', 8)
    # Calculate anomalies for the rest of the experiments
    fesom_feb_nodes_diff = empty([num_expts, size(fesom_feb_nodes_beg)])
    fesom_sep_nodes_diff = empty([num_expts, size(fesom_sep_nodes_beg)])
    for expt in range(num_expts):
        print '...' + expt_names[expt]
        fesom_feb_nodes_diff[expt,:] = monthly_avg(directories[expt] + file_end, 'area', 1) - fesom_feb_nodes_beg
        fesom_sep_nodes_diff[expt,:] = monthly_avg(directories[expt] + file_end, 'area', 8) - fesom_sep_nodes_beg

    # Find element-averages
    fesom_feb_beg = empty(len(patches))
    fesom_sep_beg = empty(len(patches))
    fesom_feb_diff = empty([num_expts, len(patches)])
    fesom_sep_diff = empty([num_expts, len(patches)])
    i = 0
    for elm in elements:
        if not elm.cavity:
            fesom_feb_beg[i] = mean(array([fesom_feb_nodes_beg[elm.nodes[0].id], fesom_feb_nodes_beg[elm.nodes[1].id], fesom_feb_nodes_beg[elm.nodes[2].id]]))
            fesom_sep_beg[i] = mean(array([fesom_sep_nodes_beg[elm.nodes[0].id], fesom_sep_nodes_beg[elm.nodes[1].id], fesom_sep_nodes_beg[elm.nodes[2].id]]))
            for expt in range(num_expts):
                fesom_feb_diff[expt,i] = mean(array([fesom_feb_nodes_diff[expt,elm.nodes[0].id], fesom_feb_nodes_diff[expt,elm.nodes[1].id], fesom_feb_nodes_diff[expt,elm.nodes[2].id]]))
                fesom_sep_diff[expt,i] = mean(array([fesom_sep_nodes_diff[expt,elm.nodes[0].id], fesom_sep_nodes_diff[expt,elm.nodes[1].id], fesom_sep_nodes_diff[expt,elm.nodes[2].id]]))
            i += 1

    print 'Plotting'
    fig = figure(figsize=(24,8))
    # February
    # 1996-2005
    ax = fig.add_subplot(2, num_expts+1, 1, aspect='equal')
    img = PatchCollection(patches, cmap='jet')
    img.set_array(fesom_feb_beg)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([x_min, x_max])
    ylim([y_min, y_max])
    ax.set_xticks([])
    ax.set_yticks([])
    title(str(beg_years[0])+'-'+str(beg_years[1]), fontsize=20)
    text(-39, 0, 'February', ha='center', va='center', rotation=90, fontsize=20)
    # Colourbar on the left
    cbaxes = fig.add_axes([0.06, 0.4, 0.015, 0.3])
    cbar = colorbar(img, cax=cbaxes, ticks=arange(0, 1+0.25, 0.25))
    cbar.ax.tick_params(labelsize=16)
    # Loop over the rest of the experiments
    for expt in range(num_expts):
        ax = fig.add_subplot(2, num_expts+1, expt+2, aspect='equal')
        img = PatchCollection(patches, cmap='RdBu_r')
        img.set_array(fesom_feb_diff[expt,:])
        img.set_clim(vmin=-1, vmax=1)
        img.set_edgecolor('face')
        ax.add_collection(img)
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        ax.set_xticks([])
        ax.set_yticks([])
        title(expt_names[expt], fontsize=20)
        if expt == num_expts-1:
            # Colourbar on the right
            cbaxes = fig.add_axes([0.92, 0.4, 0.015, 0.3])
            cbar = colorbar(img, cax=cbaxes, ticks=arange(-1, 1+0.5, 0.5))
            cbar.ax.tick_params(labelsize=16)
    # September
    # 1996-2005
    ax = fig.add_subplot(2, num_expts+1, num_expts+2, aspect='equal')
    img = PatchCollection(patches, cmap='jet')
    img.set_array(fesom_sep_beg)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([x_min, x_max])
    ylim([y_min, y_max])
    ax.set_xticks([])
    ax.set_yticks([])
    text(-39, 0, 'September', ha='center', va='center', rotation=90, fontsize=20)
    # Loop over the rest of the experiments
    for expt in range(num_expts):
        ax = fig.add_subplot(2, num_expts+1, num_expts+expt+3, aspect='equal')
        img = PatchCollection(patches, cmap='RdBu_r')
        img.set_array(fesom_sep_diff[expt,:])
        img.set_clim(vmin=-1, vmax=1)
        img.set_edgecolor('face')
        ax.add_collection(img)
        xlim([x_min, x_max])
        ylim([y_min, y_max])
        ax.set_xticks([])
        ax.set_yticks([])
        if expt == num_expts-1:
            xlabel(str(end_years[0])+'-'+str(end_years[1])+' anomalies', fontsize=20)
    suptitle('Sea ice concentration', fontsize=30)
    subplots_adjust(wspace=0.025, hspace=0.025)
    fig.show()
    fig.savefig('rcp_aice_minmax.png')

def common_grid(mesh_path, output_dir, start_year, end_year, common_file, out_file):

    # Resolution of common grid (degrees, same for lat and lon)
    res = 0.25
    # Northern boundary to interpolate to
    nbdry = -50
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0
    # Name stamped on FESOM output files
    expt_name = 'MK44005'

    print 'Calculating grids'
    # Make the latitude and longitude arrays for the common grid
    lon_common = arange(-180, 180 + res, res)
    lat_common = arange(-90, nbdry + res, res)
    # Get a 2D version of each to calculate dx and dy in metres
    lon_2d, lat_2d = meshgrid(lon_common, lat_common)
    # dx = r*cos(lat)*dlon where lat and dlon (i.e. res) are in radians
    dx = r * cos(lat_2d * deg2rad) * res * deg2rad
    # dy = r*dlat where dlat (i.e. res) is in radians
    # This is constant so reshape to an array of the right dimensions
    dy = zeros(shape(dx)) + r * res * deg2rad

    # Read the land mask from the existing ROMS common grid file
    id = Dataset(common_file, 'r')
    mask_common = id.variables['mask'][:, :]
    id.close()

    # Read FESOM 2D grid
    f = open(mesh_path + 'nod2d.out', 'r')
    f.readline()
    rlon = []
    rlat = []
    for line in f:
        tmp = line.split()
        lon_tmp = float(tmp[1])
        lat_tmp = float(tmp[2])
        if lon_tmp < -180:
            lon_tmp += 360
        elif lon_tmp > 180:
            lon_tmp -= 360
        rlon.append(lon_tmp)
        rlat.append(lat_tmp)
    f.close()
    rlon = array(rlon)
    rlat = array(rlat)
    n2d = size(rlon)
    # Unrotate grid
    lon_fesom, lat_fesom = unrotate_grid(rlon, rlat)

    print 'Setting up ' + out_file
    id = Dataset(out_file, 'w')
    id.createDimension('longitude', size(lon_common))
    id.createDimension('latitude', size(lat_common))
    id.createDimension('time', None)
    id.createVariable('longitude', 'f8', ('longitude',))
    id.variables['longitude'].units = 'degrees'
    id.variables['longitude'][:] = lon_common
    id.createVariable('latitude', 'f8', ('latitude',))
    id.variables['latitude'].units = 'degrees'
    id.variables['latitude'][:] = lat_common
    id.createVariable('time', 'f8', ('time',))
    id.variables['time'].units = 'months'
    id.createVariable('mask', 'f8', ('latitude', 'longitude'))
    id.variables['mask'].units = '1'
    id.variables['mask'][:, :] = mask_common
    id.createVariable('sst', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['sst'].long_name = 'sea surface temperature'
    id.variables['sst'].units = 'C'
    id.createVariable('sss', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['sss'].long_name = 'sea surface salinity'
    id.variables['sss'].units = 'psu'
    id.createVariable('shflux', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['shflux'].long_name = 'surface heat flux into ocean'
    id.variables['shflux'].units = 'W/m^2'
    id.createVariable('ssflux', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['ssflux'].long_name = 'surface virtual salinity flux into ocean'
    id.variables['ssflux'].units = 'psu m/s'
    id.createVariable('aice', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['aice'].long_name = 'sea ice concentration'
    id.variables['aice'].units = '1'
    id.createVariable('hice', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['hice'].long_name = 'sea ice thickness'
    id.variables['hice'].units = 'm'
    id.createVariable('uocn', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['uocn'].long_name = 'ocean surface velocity eastward'
    id.variables['uocn'].units = 'm/s'
    id.createVariable('vocn', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['vocn'].long_name = 'ocean surface velocity northward'
    id.variables['vocn'].units = 'm/s'
    id.createVariable('uice', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['uice'].long_name = 'sea ice velocity eastward'
    id.variables['uice'].units = 'm/s'
    id.createVariable('vice', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['vice'].long_name = 'sea ice velocity northward'
    id.variables['vice'].units = 'm/s'
    id.createVariable('sustr', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['sustr'].long_name = 'zonal surface stress'
    id.variables['sustr'].units = 'N/m^2'
    id.createVariable('svstr', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['svstr'].long_name = 'meridional surface stress'
    id.variables['svstr'].units = 'N/m^2'
    id.createVariable('curl_str', 'f8', ('time', 'latitude', 'longitude'))
    id.variables['curl_str'].long_name = 'curl of surface stress'
    id.variables['curl_str'].units = 'N/m^3'

    for year in range(start_year, end_year + 1):
        print 'Processing year ' + str(year)
        for month in range(12):
            print 'Processing month ' + str(month + 1)
            curr_month = (year - start_year) * 12 + month
            # Write time value for this month
            id.variables['time'][curr_month] = curr_month + 1
            # Construct file names
            oce_mean_file = output_dir + expt_name + '.' + str(year) + '.oce.mean.nc'
            forcing_diag_file = output_dir + expt_name + '.' + str(year) + '.forcing.diag.nc'
            ice_mean_file = output_dir + expt_name + '.' + str(year) + '.ice.mean.nc'

            print '...sea surface temperature'
            # Get monthly average of 3D variable
            temp_fesom = monthly_avg(oce_mean_file, 'temp', month)
            # Select surface nodes
            sst_fesom = temp_fesom[:n2d]
            # Interpolate to common grid
            sst_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, sst_fesom)
            # Apply land mask
            sst = ma.masked_where(mask_common == 0, sst_common)
            # Write to file
            id.variables['sst'][curr_month, :, :] = sst

            print '...sea surface salinity'
            # Get monthly average of 3D variable
            salt_fesom = monthly_avg(oce_mean_file, 'salt', month)
            # Select surface nodes
            sss_fesom = salt_fesom[:n2d]
            # Interpolate to common grid
            sss_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, sss_fesom)
            # Apply land mask
            sss = ma.masked_where(mask_common == 0, sss_common)
            # Write to file
            id.variables['sss'][curr_month, :, :] = sss

            print '...surface heat flux'
            # Get monthly average
            shflux_fesom = monthly_avg(forcing_diag_file, 'qnet', month)
            # Interpolate to common grid
            shflux_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, shflux_fesom)
            # Apply land mask
            shflux = ma.masked_where(mask_common == 0, shflux_common)
            # Write to file
            id.variables['shflux'][curr_month, :, :] = shflux

            print '...surface salt flux'
            # Get monthly average
            ssflux_fesom = monthly_avg(forcing_diag_file, 'virtual_salt', month)
            # Interpolate to common grid
            ssflux_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, ssflux_fesom)
            # Apply land mask
            ssflux = ma.masked_where(mask_common == 0, ssflux_common)
            # Write to file
            id.variables['ssflux'][curr_month, :, :] = ssflux

            print '...sea ice concentration'
            # Get monthly average
            aice_fesom = monthly_avg(ice_mean_file, 'area', month)
            # Interpolate to common grid
            aice_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, aice_fesom)
            # Apply land mask
            aice = ma.masked_where(mask_common == 0, aice_common)
            # Write to file
            id.variables['aice'][curr_month, :, :] = aice

            print '...sea ice thickness'
            # Get monthly average
            hice_fesom = monthly_avg(ice_mean_file, 'hice', month)
            # Interpolate to common grid
            hice_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, hice_fesom)
            # Apply land mask
            hice = ma.masked_where(mask_common == 0, hice_common)
            # Write to file
            id.variables['hice'][curr_month, :, :] = hice

            print '...surface ocean velocity vector'
            # Get monthly averages of both vector components in 3D
            uocn_3d_tmp = monthly_avg(oce_mean_file, 'u', month)
            vocn_3d_tmp = monthly_avg(oce_mean_file, 'v', month)
            # Select surface nodes
            uocn_tmp = uocn_3d_tmp[:n2d]
            vocn_tmp = vocn_3d_tmp[:n2d]
            # Unrotate
            uocn_fesom, vocn_fesom = unrotate_vector(rlon, rlat, uocn_tmp, vocn_tmp)
            # Interpolate to common grid
            uocn_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, uocn_fesom)
            vocn_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, vocn_fesom)
            # Apply land mask
            uocn = ma.masked_where(mask_common == 0, uocn_common)
            vocn = ma.masked_where(mask_common == 0, vocn_common)
            # Write to file
            id.variables['uocn'][curr_month, :, :] = uocn
            id.variables['vocn'][curr_month, :, :] = vocn

            print '...sea ice velocity vector'
            # Get monthly averages of both vector components
            uice_tmp = monthly_avg(ice_mean_file, 'uice', month)
            vice_tmp = monthly_avg(ice_mean_file, 'vice', month)
            # Unrotate
            uice_fesom, vice_fesom = unrotate_vector(rlon, rlat, uice_tmp, vice_tmp)
            # Interpolate to common grid
            uice_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, uice_fesom)
            vice_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, vice_fesom)
            # Apply land mask
            uice = ma.masked_where(mask_common == 0, uice_common)
            vice = ma.masked_where(mask_common == 0, vice_common)
            # Write to file
            id.variables['uice'][curr_month, :, :] = uice
            id.variables['vice'][curr_month, :, :] = vice

            print '...surface stress vector'
            # Surface stresses
            # Get monthly averages of both vector components
            sustr_tmp = monthly_avg(forcing_diag_file, 'stress_x', month)
            svstr_tmp = monthly_avg(forcing_diag_file, 'stress_y', month)
            # Unrotate
            sustr_fesom, svstr_fesom = unrotate_vector(rlon, rlat, sustr_tmp, svstr_tmp)
            # Interpolate to common grid
            sustr_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, sustr_fesom)
            svstr_common = interp_fesom2common(lon_common, lat_common, lon_fesom, lat_fesom, svstr_fesom)
            # Apply land mask
            sustr = ma.masked_where(mask_common == 0, sustr_common)
            svstr = ma.masked_where(mask_common == 0, svstr_common)
            # Write to file
            id.variables['sustr'][curr_month, :, :] = sustr
            id.variables['svstr'][curr_month, :, :] = svstr

            print '...curl of surface stress vector'
            # Curl of surface stress = d/dx (svstr) - d/dy (sustr)
            # First calculate the two derivatives
            dsvstr_dx = ma.empty(shape(svstr_common))
            # Forward difference approximation
            dsvstr_dx[:, :-1] = (svstr_common[:, 1:] - svstr_common[:, :-1]) / dx[:, :-1]
            # Backward difference for the last column
            dsvstr_dx[:, -1] = (svstr_common[:, -1] - svstr_common[:, -2]) / dx[:, -1]
            dsustr_dy = ma.empty(shape(sustr_common))
            dsustr_dy[:-1, :] = (sustr_common[1:, :] - sustr_common[:-1, :]) / dy[:-1, :]
            # Backward difference for the last row
            dsustr_dy[-1, :] = (sustr_common[-1, :] - sustr_common[-2, :]) / dy[-1, :]
            curl_str = dsvstr_dx - dsustr_dy
            curl_str = ma.masked_where(mask_common == 0, curl_str)
            # Write to file
            id.variables['curl_str'][curr_month, :, :] = curl_str

    print 'Finished'
    id.close()

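# A minimal sketch (not from the original script) of how common_grid might be
# called for a single decade; every path below is a placeholder, and the ROMS
# common-grid file is assumed to already contain the land mask read above.
def example_common_grid_usage():
    common_grid('/path/to/fesom/mesh/', '/path/to/fesom/output/', 1996, 2005, 'roms_common_grid.nc', 'fesom_common_grid.nc')
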
def mip_aice_minmax_nsidc(cice_file, cice_log, fesom_mesh_path_lr, fesom_output_dir_lr, fesom_log_lr, fesom_mesh_path_hr, fesom_output_dir_hr, fesom_log_hr):

    # Range of years to process (exclude 2016 because no NSIDC data)
    start_year = 1992
    end_year = 2015
    # Starting and ending days for each month
    # Ignore leap years, they will be dealt with later
    start_day = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
    end_day = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    # Beginning of FESOM output filenames
    fesom_file_head = 'MK44005'
    # Paths to reconstruct NSIDC files for each month
    nsidc_head1 = '/short/m68/kaa561/nsidc_aice/seaice_conc_monthly_sh_f11_'
    nsidc_head2 = '/short/m68/kaa561/nsidc_aice/seaice_conc_monthly_sh_f13_'
    nsidc_head3 = '/short/m68/kaa561/nsidc_aice/seaice_conc_monthly_sh_f17_'
    nsidc_tail = '_v02r00.nc'
    # Path to NSIDC monthly extent timeseries for February and September
    nsidc_feb_ts_file = '/short/m68/kaa561/nsidc_aice/S_02_extent_v2.1.csv'
    nsidc_sep_ts_file = '/short/m68/kaa561/nsidc_aice/S_09_extent_v2.1.csv'
    # FESOM plotting parameters
    circumpolar = True
    mask_cavities = True
    # Degrees to radians conversion factor
    deg2rad = pi/180.0
    num_years = end_year - start_year + 1

    print 'Processing NSIDC'
    # First read the grid
    id = Dataset(nsidc_head1 + '199201' + nsidc_tail, 'r')
    nsidc_lon = id.variables['longitude'][:,:]
    nsidc_lat = id.variables['latitude'][:,:]
    id.close()
    # Read February and September concentration for each year
    nsidc_feb = ma.empty([num_years, size(nsidc_lon,0), size(nsidc_lat,1)])
    nsidc_sep = ma.empty([num_years, size(nsidc_lon,0), size(nsidc_lat,1)])
    # Loop over years
    for year in range(start_year, end_year+1):
        # Reconstruct file paths
        if year < 1996:
            feb_file = nsidc_head1 + str(year) + '02' + nsidc_tail
            sep_file = nsidc_head1 + str(year) + '09' + nsidc_tail
        elif year < 2008:
            feb_file = nsidc_head2 + str(year) + '02' + nsidc_tail
            sep_file = nsidc_head2 + str(year) + '09' + nsidc_tail
        else:
            feb_file = nsidc_head3 + str(year) + '02' + nsidc_tail
            sep_file = nsidc_head3 + str(year) + '09' + nsidc_tail
        # Read February data and mask
        id = Dataset(feb_file, 'r')
        feb_data_tmp = id.variables['seaice_conc_monthly_cdr'][0,:,:]
        # (This variable is masked but sea ice concentration isn't)
        nsidc_mask = id.variables['stdev_of_seaice_conc_monthly_cdr'][0,:,:]
        id.close()
        # Apply mask
        feb_data = ma.empty(shape(feb_data_tmp))
        feb_data[:,:] = 0.0
        feb_data[~nsidc_mask.mask] = feb_data_tmp[~nsidc_mask.mask]
        feb_data[nsidc_mask.mask] = ma.masked
        # Save result
        nsidc_feb[year-start_year,:,:] = feb_data[:,:]
        # Repeat for September
        id = Dataset(sep_file, 'r')
        sep_data_tmp = id.variables['seaice_conc_monthly_cdr'][0,:,:]
        nsidc_mask = id.variables['stdev_of_seaice_conc_monthly_cdr'][0,:,:]
        id.close()
        sep_data = ma.empty(shape(sep_data_tmp))
        sep_data[:,:] = 0.0
        sep_data[~nsidc_mask.mask] = sep_data_tmp[~nsidc_mask.mask]
        sep_data[nsidc_mask.mask] = ma.masked
        nsidc_sep[year-start_year,:,:] = sep_data[:,:]
    # Average over years
    nsidc_feb = mean(nsidc_feb, axis=0)
    nsidc_sep = mean(nsidc_sep, axis=0)
    # Make sure mask is still there
    nsidc_feb[nsidc_mask.mask] = ma.masked
    nsidc_sep[nsidc_mask.mask] = ma.masked
    # Polar coordinates for plotting
    nsidc_x = -(nsidc_lat+90)*cos(nsidc_lon*deg2rad+pi/2)
    nsidc_y = (nsidc_lat+90)*sin(nsidc_lon*deg2rad+pi/2)
    # Choose boundaries based on extent of NSIDC grid
    bdry1 = amax(nsidc_x[:,0])
    bdry2 = amin(nsidc_x[:,-1])
    bdry3 = amin(nsidc_y[:,0])
    bdry4 = amax(nsidc_y[:,-1])
    # Now read extent timeseries
    nsidc_feb_extent = []
    f = open(nsidc_feb_ts_file, 'r')
    # Skip the header
    f.readline()
    # Skip the years we don't care about
    for year in range(1979, start_year):
        f.readline()
    # Read the years we care about
    for year in range(start_year, end_year+1):
        tmp = f.readline().split(',')
        # Extract the extent (second last column)
        nsidc_feb_extent.append(float(tmp[-2]))
    f.close()
    # Repeat for September
    nsidc_sep_extent = []
    f = open(nsidc_sep_ts_file, 'r')
    f.readline()
    for year in range(1979, start_year):
        f.readline()
    for year in range(start_year, end_year+1):
        tmp = f.readline().split(',')
        nsidc_sep_extent.append(float(tmp[-2]))
    f.close()

    print 'Processing MetROMS'
    # First read the grid
    id = Dataset(cice_file, 'r')
    cice_lon_tmp = id.variables['TLON'][:,:]
    cice_lat_tmp = id.variables['TLAT'][:,:]
    id.close()
    # Wrap the periodic boundary by 1 cell
    cice_lon = ma.empty([size(cice_lon_tmp,0), size(cice_lon_tmp,1)+1])
    cice_lat = ma.empty([size(cice_lat_tmp,0), size(cice_lat_tmp,1)+1])
    cice_lon[:,:-1] = cice_lon_tmp
    cice_lon[:,-1] = cice_lon_tmp[:,0]
    cice_lat[:,:-1] = cice_lat_tmp
    cice_lat[:,-1] = cice_lat_tmp[:,0]
    # Get averages for February and September
    # Start with first year just to initialise the arrays with the right size
    print '...monthly average for ' + str(start_year)
    cice_feb_tmp = monthly_avg_cice(cice_file, 'aice', shape(cice_lon_tmp), 1, instance=1)
    cice_sep_tmp = monthly_avg_cice(cice_file, 'aice', shape(cice_lon_tmp), 8, instance=1)
    # Loop over the rest of the years
    for year in range(start_year+1, end_year+1):
        print '...monthly average for ' + str(year)
        cice_feb_tmp = cice_feb_tmp + monthly_avg_cice(cice_file, 'aice', shape(cice_lon_tmp), 1, instance=year-start_year+1)
        cice_sep_tmp = cice_sep_tmp + monthly_avg_cice(cice_file, 'aice', shape(cice_lon_tmp), 8, instance=year-start_year+1)
    # Convert from integrals to averages
    cice_feb_tmp = cice_feb_tmp/num_years
    cice_sep_tmp = cice_sep_tmp/num_years
    # Wrap the periodic boundary
    cice_feb = ma.empty(shape(cice_lon))
    cice_sep = ma.empty(shape(cice_lon))
    cice_feb[:,:-1] = cice_feb_tmp
    cice_sep[:,:-1] = cice_sep_tmp
    cice_feb[:,-1] = cice_feb_tmp[:,0]
    cice_sep[:,-1] = cice_sep_tmp[:,0]
    # Polar coordinates for plotting
    cice_x = -(cice_lat+90)*cos(cice_lon*deg2rad+pi/2)
    cice_y = (cice_lat+90)*sin(cice_lon*deg2rad+pi/2)
    # Now get extent timeseries
    # Read 5-day logfile
    cice_time_vals = []
    cice_extent_5day = []
    f = open(cice_log, 'r')
    f.readline()
    for line in f:
        try:
            cice_time_vals.append(float(line))
        except ValueError:
            break
    for line in f:
        cice_extent_5day.append(float(line))
    f.close()
    # Convert time to Date objects
    cice_time = num2date(array(cice_time_vals)*365.25, units='days since 1992-01-01 00:00:00', calendar='gregorian')
    # Initialise integral arrays for monthly averages
    # Add an extra year because the simulation goes to the end of 2016
    cice_extent = zeros((num_years+1)*12)
    cice_ndays = zeros((num_years+1)*12)
    for t in range(size(cice_time)):
        # 5-day averages marked with the next day's date
        year = cice_time[t].year
        month = cice_time[t].month-1  # Convert to 0-indexed
        day = cice_time[t].day
        # Check for leap years
        leap_year = False
        if mod(year, 4) == 0:
            leap_year = True
            if mod(year, 100) == 0:
                leap_year = False
                if mod(year, 400) == 0:
                    leap_year = True
        if leap_year:
            end_day[1] = 29
        else:
            end_day[1] = 28
        if day-5 < start_day[month]:
            # Spills over into the previous month
            prev_month = mod(month-1, 12)
            # How many days does it spill over by?
            spill_days = start_day[month]-day+5
            # Should be between 1 and 5
            if spill_days < 1 or spill_days > 5:
                print 'Problem: spill_days is ' + str(spill_days)
                print 'Timestep ' + str(t+1)
                print 'Year ' + str(year)
                print 'Month ' + str(month+1)
                print 'Day ' + str(day)
                #return
            # Split between previous month and this month
            # First find indices to update
            if prev_month+1 == 12:
                # Spilled into previous year
                index_prev = (year-1-start_year)*12 + prev_month
            else:
                index_prev = (year-start_year)*12 + prev_month
            index = (year-start_year)*12 + month
            # Integrate
            cice_extent[index_prev] += cice_extent_5day[t]*spill_days
            cice_ndays[index_prev] += spill_days
            cice_extent[index] += cice_extent_5day[t]*(5-spill_days)
            cice_ndays[index] += 5-spill_days
        else:
            # Entirely within the month
            index = (year-start_year)*12 + month
            cice_extent[index] += cice_extent_5day[t]*5
            cice_ndays[index] += 5
    # Convert from integrals to averages
    cice_extent /= cice_ndays
    # Extract February and September
    cice_feb_extent = []
    cice_sep_extent = []
    for year in range(start_year, end_year+1):
        cice_feb_extent.append(cice_extent[(year-start_year)*12+1])
        cice_sep_extent.append(cice_extent[(year-start_year)*12+8])

    print 'Processing FESOM low-res'
    # First build the grid
    elements_lr, patches_lr = make_patches(fesom_mesh_path_lr, circumpolar, mask_cavities)
    # Get averages for February and September
    # Start with first year just to initialise the arrays with the right size
    print '...monthly average for ' + str(start_year)
    fesom_feb_nodes_lr = monthly_avg(fesom_output_dir_lr + fesom_file_head + '.' + str(start_year) + '.ice.mean.nc', 'area', 1)
    fesom_sep_nodes_lr = monthly_avg(fesom_output_dir_lr + fesom_file_head + '.' + str(start_year) + '.ice.mean.nc', 'area', 8)
    # Loop over the rest of the years
    for year in range(start_year+1, end_year+1):
        print '...monthly average for ' + str(year)
        fesom_feb_nodes_lr = fesom_feb_nodes_lr + monthly_avg(fesom_output_dir_lr + fesom_file_head + '.' + str(year) + '.ice.mean.nc', 'area', 1)
        fesom_sep_nodes_lr = fesom_sep_nodes_lr + monthly_avg(fesom_output_dir_lr + fesom_file_head + '.' + str(year) + '.ice.mean.nc', 'area', 8)
    # Convert from integrals to averages
    fesom_feb_nodes_lr = fesom_feb_nodes_lr/num_years
    fesom_sep_nodes_lr = fesom_sep_nodes_lr/num_years
    # Find element-averages
    fesom_feb_lr = []
    fesom_sep_lr = []
    for elm in elements_lr:
        if not elm.cavity:
            # Average over 3 component nodes
            fesom_feb_lr.append(mean(array([fesom_feb_nodes_lr[elm.nodes[0].id], fesom_feb_nodes_lr[elm.nodes[1].id], fesom_feb_nodes_lr[elm.nodes[2].id]])))
            fesom_sep_lr.append(mean(array([fesom_sep_nodes_lr[elm.nodes[0].id], fesom_sep_nodes_lr[elm.nodes[1].id], fesom_sep_nodes_lr[elm.nodes[2].id]])))
    fesom_feb_lr = array(fesom_feb_lr)
    fesom_sep_lr = array(fesom_sep_lr)
    # Get extent timeseries
    # Read 5-day logfile
    fesom_extent_5day_lr = []
    f = open(fesom_log_lr, 'r')
    f.readline()
    for line in f:
        fesom_extent_5day_lr.append(float(line))
    f.close()
    # Initialise monthly arrays
    fesom_feb_extent_lr = zeros(num_years)
    fesom_sep_extent_lr = zeros(num_years)
    for year in range(start_year, end_year+1):
        # First timestep of year in 5-day logfile
        t0 = (year-start_year)*73
        # Monthly averages are hard-coded and ugly
        # February: 4/5 of index 7, indices 8-11, and 4/5 of index 12
        fesom_feb_extent_lr[year-start_year] = (fesom_extent_5day_lr[t0+6]*4 + sum(fesom_extent_5day_lr[t0+7:t0+11]*5) + fesom_extent_5day_lr[t0+11]*4)/28.0
        # September: 2/5 of index 49, indices 50-54, 3/5 of index 55
        fesom_sep_extent_lr[year-start_year] = (fesom_extent_5day_lr[t0+48]*2 + sum(fesom_extent_5day_lr[t0+49:t0+54]*5) + fesom_extent_5day_lr[t0+54]*3)/30.0

    print 'Processing FESOM high-res'
    elements_hr, patches_hr = make_patches(fesom_mesh_path_hr, circumpolar, mask_cavities)
    print '...monthly average for ' + str(start_year)
    fesom_feb_nodes_hr = monthly_avg(fesom_output_dir_hr + fesom_file_head + '.' + str(start_year) + '.ice.mean.nc', 'area', 1)
    fesom_sep_nodes_hr = monthly_avg(fesom_output_dir_hr + fesom_file_head + '.' + str(start_year) + '.ice.mean.nc', 'area', 8)
    for year in range(start_year+1, end_year+1):
        print '...monthly average for ' + str(year)
        fesom_feb_nodes_hr = fesom_feb_nodes_hr + monthly_avg(fesom_output_dir_hr + fesom_file_head + '.' + str(year) + '.ice.mean.nc', 'area', 1)
        fesom_sep_nodes_hr = fesom_sep_nodes_hr + monthly_avg(fesom_output_dir_hr + fesom_file_head + '.' + str(year) + '.ice.mean.nc', 'area', 8)
    fesom_feb_nodes_hr = fesom_feb_nodes_hr/num_years
    fesom_sep_nodes_hr = fesom_sep_nodes_hr/num_years
    fesom_feb_hr = []
    fesom_sep_hr = []
    for elm in elements_hr:
        if not elm.cavity:
            fesom_feb_hr.append(mean(array([fesom_feb_nodes_hr[elm.nodes[0].id], fesom_feb_nodes_hr[elm.nodes[1].id], fesom_feb_nodes_hr[elm.nodes[2].id]])))
            fesom_sep_hr.append(mean(array([fesom_sep_nodes_hr[elm.nodes[0].id], fesom_sep_nodes_hr[elm.nodes[1].id], fesom_sep_nodes_hr[elm.nodes[2].id]])))
    fesom_feb_hr = array(fesom_feb_hr)
    fesom_sep_hr = array(fesom_sep_hr)
    fesom_extent_5day_hr = []
    f = open(fesom_log_hr, 'r')
    f.readline()
    for line in f:
        fesom_extent_5day_hr.append(float(line))
    f.close()
    fesom_feb_extent_hr = zeros(num_years)
    fesom_sep_extent_hr = zeros(num_years)
    for year in range(start_year, end_year+1):
        t0 = (year-start_year)*73
        fesom_feb_extent_hr[year-start_year] = (fesom_extent_5day_hr[t0+6]*4 + sum(fesom_extent_5day_hr[t0+7:t0+11]*5) + fesom_extent_5day_hr[t0+11]*4)/28.0
        fesom_sep_extent_hr[year-start_year] = (fesom_extent_5day_hr[t0+48]*2 + sum(fesom_extent_5day_hr[t0+49:t0+54]*5) + fesom_extent_5day_hr[t0+54]*3)/30.0

    time_axis = arange(start_year, end_year+1)

    print 'Plotting'
    fig = figure(figsize=(24,10))
    gs1 = GridSpec(2, 4)
    gs1.update(left=0.1, right=0.77, wspace=0.04, hspace=0.05)
    # NSIDC, February
    ax = subplot(gs1[0, 0], aspect='equal')
    img = pcolor(nsidc_x, nsidc_y, nsidc_feb, vmin=0, vmax=1, cmap='jet')
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    title('NSIDC', fontsize=24)
    text(-39, 0, 'February', fontsize=24, ha='right')
    # MetROMS, February
    ax = subplot(gs1[0, 1], aspect='equal')
    img = pcolor(cice_x, cice_y, cice_feb, vmin=0, vmax=1, cmap='jet')
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    title('MetROMS', fontsize=24)
    # FESOM low-res, February
    ax = subplot(gs1[0, 2], aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(fesom_feb_lr)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    title('FESOM (low-res)', fontsize=24)
    # FESOM high-res, February
    ax = subplot(gs1[0, 3], aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(fesom_feb_hr)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    title('FESOM (high-res)', fontsize=24)
    # Main title
    text(-170, 47, 'a) Sea ice concentration ('+str(start_year)+'-'+str(end_year)+' average)', fontsize=30)
    # NSIDC, September
    ax = subplot(gs1[1, 0], aspect='equal')
    img = pcolor(nsidc_x, nsidc_y, nsidc_sep, vmin=0, vmax=1, cmap='jet')
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    text(-39, 0, 'September', fontsize=24, ha='right')
    # MetROMS, September
    ax = subplot(gs1[1, 1], aspect='equal')
    img = pcolor(cice_x, cice_y, cice_sep, vmin=0, vmax=1, cmap='jet')
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM low-res, September
    ax = subplot(gs1[1, 2], aspect='equal')
    img = PatchCollection(patches_lr, cmap='jet')
    img.set_array(fesom_sep_lr)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    # FESOM high-res, September
    ax = subplot(gs1[1, 3], aspect='equal')
    img = PatchCollection(patches_hr, cmap='jet')
    img.set_array(fesom_sep_hr)
    img.set_clim(vmin=0, vmax=1)
    img.set_edgecolor('face')
    ax.add_collection(img)
    xlim([bdry1, bdry2])
    ylim([bdry3, bdry4])
    ax.set_xticks([])
    ax.set_yticks([])
    # Add a colourbar at the bottom
    cbaxes = fig.add_axes([0.1, 0.04, 0.25, 0.04])
    cbar = colorbar(img, orientation='horizontal', ticks=arange(0,1+0.25,0.25), cax=cbaxes)
    cbar.ax.tick_params(labelsize=20)
    # Add extent timeseries on rightmost column, with more space for labels
    gs2 = GridSpec(2, 1)
    gs2.update(left=0.79, right=0.95, hspace=0.15)
    # February
    ax = subplot(gs2[0, 0])
    ax.plot(time_axis, nsidc_feb_extent, color='black', linewidth=2, linestyle='dashed')
    ax.plot(time_axis, cice_feb_extent, color='blue', linewidth=1.5)
    ax.plot(time_axis, fesom_feb_extent_lr, color='green', linewidth=1.5)
    ax.plot(time_axis, fesom_feb_extent_hr, color='magenta', linewidth=1.5)
    xlim([start_year, end_year])
    ax.set_yticks(arange(0,4+0.5,0.5))
    ax.set_yticklabels(['0', '', '1', '', '2', '', '3', '', '4'])
    ax.tick_params(axis='x', labelsize=20)
    ax.tick_params(axis='y', labelsize=20)
    grid(True)
    title('b) Sea ice extent\n'+r'(million km$^2$)', fontsize=26)
    # Extent timeseries, September
    ax = subplot(gs2[1, 0])
    ax.plot(time_axis, nsidc_sep_extent, color='black', label='NSIDC', linewidth=2, linestyle='dashed')
    ax.plot(time_axis, cice_sep_extent, color='blue', label='MetROMS', linewidth=1.5)
    ax.plot(time_axis, fesom_sep_extent_lr, color='green', label='FESOM (low-res)', linewidth=1.5)
    ax.plot(time_axis, fesom_sep_extent_hr, color='magenta', label='FESOM (high-res)', linewidth=1.5)
    xlim([start_year, end_year])
    ax.set_yticks(arange(16,23+1,1))
    ax.set_yticklabels(['16', '', '18', '', '20', '', '22', ''])
    ax.tick_params(axis='x', labelsize=20)
    ax.tick_params(axis='y', labelsize=20)
    grid(True)
    ax.legend(bbox_to_anchor=(1.04,-0.07), ncol=4, fontsize=20)
    fig.show()
    fig.savefig('aice_minmax_nsidc.png')

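# A minimal sketch (not from the original script) of calling the
# MetROMS/FESOM/NSIDC comparison above; every argument below is a placeholder
# for the real CICE output file, the extent logfiles, and the two FESOM
# mesh/output directories.
def example_mip_aice_minmax_nsidc_usage():
    mip_aice_minmax_nsidc('cice_avg.nc', 'cice_seaice_extent.log', '/path/to/mesh_low_res/', '/path/to/output_low_res/', 'fesom_seaice_extent_lr.log', '/path/to/mesh_high_res/', '/path/to/output_high_res/', 'fesom_seaice_extent_hr.log')
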