# Imports needed by the functions in this file. The project-local modules
# (fesom_grid, fesom_sidegrid, monthly_avg, unesco, unrotate_grid,
# cartesian_grid_2d, cartesian_grid_3d, average_btw_depths,
# fesom_element_average_btw_depths, temp_salt_slice) are assumed to mirror
# the function names, following the metroms/fesomtools convention.
from netCDF4 import Dataset
from numpy import *
import numpy.ma as ma
from os.path import exists
from scipy.interpolate import griddata, RegularGridInterpolator
from matplotlib.pyplot import *
from matplotlib.gridspec import GridSpec
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
from fesom_grid import fesom_grid
from fesom_sidegrid import fesom_sidegrid
from monthly_avg import monthly_avg
from unesco import unesco
from unrotate_grid import unrotate_grid
from cartesian_grid_2d import cartesian_grid_2d
from cartesian_grid_3d import cartesian_grid_3d
from average_btw_depths import average_btw_depths
from fesom_element_average_btw_depths import fesom_element_average_btw_depths
from temp_salt_slice import temp_salt_slice


def total_iceshelf_area(roms_grid_file, fesom_mesh_path_lr, fesom_mesh_path_hr):

    id = Dataset(roms_grid_file, 'r')
    lon = id.variables['lon_rho'][:-15, 1:-1]
    lat = id.variables['lat_rho'][:-15, 1:-1]
    zice = id.variables['zice'][:-15, 1:-1]
    id.close()
    dx, dy = cartesian_grid_2d(lon, lat)
    # Mask out the open ocean and land, leaving only ice shelf cells
    dA = ma.masked_where(zice == 0, dx * dy)
    print 'MetROMS: ' + str(sum(dA)) + ' m^2'

    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True, cross_180=False)
    area_elm_lr = zeros(len(elements_lr))
    for i in range(len(elements_lr)):
        elm = elements_lr[i]
        if elm.cavity:
            area_elm_lr[i] = elm.area()
    print 'FESOM (low-res): ' + str(sum(area_elm_lr)) + ' m^2'

    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True, cross_180=False)
    area_elm_hr = zeros(len(elements_hr))
    for i in range(len(elements_hr)):
        elm = elements_hr[i]
        if elm.cavity:
            area_elm_hr[i] = elm.area()
    print 'FESOM (high-res): ' + str(sum(area_elm_hr)) + ' m^2'
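
# A minimal, self-contained sketch of the masking-and-integrating idiom used
# above, on a synthetic 2x2 grid (assumption: zice < 0 marks ice shelf, as in
# the ROMS grid file).
def demo_masked_iceshelf_area():
    zice = array([[0.0, -100.0], [0.0, -250.0]])
    dA = ones([2, 2]) * 1e6  # four cells of 1 km^2 each
    # Only the two ice shelf cells survive the mask, so this prints 2e6
    shelf_area = sum(ma.masked_where(zice == 0, dA))
    print 'Demo ice shelf area: ' + str(shelf_area) + ' m^2'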

def timeseries_seaice_formation(mesh_path, output_path, start_year, end_year, log_file):

    # Naming conventions for FESOM output files
    file_head = output_path + 'MK44005.'
    file_tail = '.ice.diag.nc'
    num_years = end_year - start_year + 1
    # Parameters for selecting the continental shelf
    lat0 = -60
    h0 = 1500
    # Seconds to years conversion
    sec_per_year = 365.25 * 24 * 60 * 60

    print 'Building mesh'
    elements = fesom_grid(mesh_path, circumpolar=True, cross_180=True)

    print 'Selecting continental shelf'
    # Set up an array of the area of each element, zero if it's not on the
    # continental shelf
    shelf_areas = zeros(len(elements))
    for i in range(len(elements)):
        elm = elements[i]
        lat = mean(elm.lat)
        bathy = mean(array([(elm.nodes[0].find_bottom()).depth,
                            (elm.nodes[1].find_bottom()).depth,
                            (elm.nodes[2].find_bottom()).depth]))
        if lat < lat0 and bathy < h0 and not elm.cavity:
            shelf_areas[i] = elm.area()

    # Set up an array for net sea ice formation on the continental shelf
    formation = zeros(num_years)
    for year in range(start_year, end_year + 1):
        print 'Processing year ' + str(year)
        id = Dataset(file_head + str(year) + file_tail, 'r')
        # Read thdgr, annually average, and convert from m/s to m/y
        thdgr = mean(id.variables['thdgr'][:, :], axis=0) * sec_per_year
        id.close()
        # Average over elements
        thdgr_elm = zeros(len(elements))
        for i in range(len(elements)):
            elm = elements[i]
            thdgr_elm[i] = mean(array([thdgr[elm.nodes[0].id],
                                       thdgr[elm.nodes[1].id],
                                       thdgr[elm.nodes[2].id]]))
        # Integrate and convert to thousand km^3/y
        formation[year - start_year] = sum(thdgr_elm * shelf_areas) * 1e-12

    print 'Saving results to log file'
    f = open(log_file, 'w')
    f.write('Net sea ice formation on continental shelf (thousand km^3/y):\n')
    for t in range(num_years):
        f.write(str(formation[t]) + '\n')
    f.close()
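
# The three-node average in the loop above recurs throughout this file; a
# hypothetical shared helper like this one (not part of the original tools)
# could replace the inline loops.
def nodal_to_elemental(elements, field):
    # field is a 1D array indexed by 2D node id; returns one value per element
    field_elm = zeros(len(elements))
    for i in range(len(elements)):
        elm = elements[i]
        field_elm[i] = (field[elm.nodes[0].id] + field[elm.nodes[1].id] + field[elm.nodes[2].id]) / 3.0
    return field_elm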

def rcp_seaice_extent_change():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/', '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/', '/short/y99/kaa561/FESOM/highres_spinup/']
    file_beg = 'avg.ice.mean.1996.2005.nc'
    file_end = 'avg.ice.mean.2091.2100.nc'
    # Titles
    expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL']
    num_expts = len(directories)
    # Mesh parameters
    circumpolar = True
    cross_180 = False

    print 'Building mesh'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)
    num_elm = len(elements)

    print 'Reading data'
    print '...1996-2005'
    # Calculate monthly averages for September (month index 8, 0-based)
    aice_nodes_beg = monthly_avg(directory_beg + file_beg, 'area', 8)
    n2d = size(aice_nodes_beg)
    aice_nodes_end = empty([num_expts, n2d])
    for expt in range(num_expts):
        print '...' + expt_names[expt]
        aice_nodes_end[expt, :] = monthly_avg(directories[expt] + file_end, 'area', 8)

    print 'Calculating element-averages'
    aice_beg = empty(num_elm)
    aice_end = empty([num_expts, num_elm])
    # Also save the area of each element
    area_elm = empty(num_elm)
    for i in range(num_elm):
        elm = elements[i]
        area_elm[i] = elm.area()
        aice_beg[i] = (aice_nodes_beg[elm.nodes[0].id] + aice_nodes_beg[elm.nodes[1].id] + aice_nodes_beg[elm.nodes[2].id]) / 3.0
        for expt in range(num_expts):
            aice_end[expt, i] = (aice_nodes_end[expt, elm.nodes[0].id] + aice_nodes_end[expt, elm.nodes[1].id] + aice_nodes_end[expt, elm.nodes[2].id]) / 3.0

    print 'Sea ice extent:'
    # 1996-2005
    # Select elements with concentration >= 15%
    flag_beg = aice_beg >= 0.15
    # Integrate the area of these elements and convert to million km^2
    extent_beg = sum(flag_beg * area_elm) * 1e-12
    print '1996-2005: ' + str(extent_beg) + ' million km^2'
    # 2091-2100
    flag_end = aice_end >= 0.15
    for expt in range(num_expts):
        extent_end = sum(flag_end[expt, :] * area_elm) * 1e-12
        percent_change = (extent_end - extent_beg) / extent_beg * 100
        print expt_names[expt] + ': ' + str(extent_end) + ' million km^2; change of ' + str(percent_change) + '%'

def timeseries_seaice_extent_faster(mesh_path, output_path, start_year, end_year, log_file):

    circumpolar = True   # Only consider elements south of 30S
    cross_180 = False    # Don't make second copies of elements that cross 180E
    days_per_output = 5  # Number of days for each output step
    expt_name = 'MK44005'

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    extent = []
    for year in range(start_year, end_year + 1):
        print year
        ice_file = output_path + expt_name + '.' + str(year) + '.ice.mean.nc'
        print 'Reading data'
        id = Dataset(ice_file, 'r')
        num_time = id.variables['time'].shape[0]
        aice = id.variables['area'][:, :]
        id.close()
        print 'Setting up arrays'
        # Sea ice concentration at each element
        aice_elm = zeros([num_time, len(elements)])
        # Area of each element
        area_elm = zeros(len(elements))
        # Loop over elements to fill these in
        for i in range(len(elements)):
            elm = elements[i]
            # Average aice over the 3 component nodes
            aice_elm[:, i] = (aice[:, elm.nodes[0].id] + aice[:, elm.nodes[1].id] + aice[:, elm.nodes[2].id]) / 3
            # Call the area function
            area_elm[i] = elm.area()
        # Select elements with concentration >= 15%
        flag = aice_elm >= 0.15
        print 'Building timeseries'
        for t in range(num_time):
            # Integrate extent and convert to million km^2
            extent.append(sum(flag[t, :] * area_elm) * 1e-12)

    print 'Saving results to log file'
    f = open(log_file, 'w')
    f.write('Sea Ice Extent (million km^2):\n')
    for val in extent:
        f.write(str(val) + '\n')
    f.close()
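
# The extent integral above in compact array form: a sketch assuming
# aice_elm (time x element) and area_elm (element) as built in the loop.
def extent_from_concentration(aice_elm, area_elm, threshold=0.15):
    # Flag elements at or above the concentration threshold, integrate their
    # area, and convert m^2 to million km^2 (one value per output step)
    flag = aice_elm >= threshold
    return sum(flag * area_elm[None, :], axis=1) * 1e-12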

def mip_calc_watermasses(roms_grid, roms_file, fesom_mesh_lr, fesom_mesh_hr, fesom_file_lr, fesom_file_hr):

    # Sectors to consider
    sector_names = ['Filchner-Ronne Ice Shelf Cavity', 'Eastern Weddell Region Cavities', 'Amery Ice Shelf Cavity', 'Australian Sector Cavities', 'Ross Sea Cavities', 'Amundsen Sea Cavities', 'Bellingshausen Sea Cavities', 'Larsen Ice Shelf Cavities', 'All Ice Shelf Cavities']
    num_sectors = len(sector_names)
    # Water masses to consider
    wm_names = ['ISW', 'AASW', 'CDW', 'MCDW', 'WW', 'HSSW']
    num_watermasses = len(wm_names)
    # ROMS vertical grid parameters
    theta_s = 7.0
    theta_b = 2.0
    hc = 250
    N = 31
    # FESOM mesh parameters
    circumpolar = True
    cross_180 = False

    print 'Processing MetROMS'
    # Read the ROMS grid variables we need
    id = Dataset(roms_grid, 'r')
    roms_lon = id.variables['lon_rho'][:, :]
    roms_lat = id.variables['lat_rho'][:, :]
    roms_h = id.variables['h'][:, :]
    roms_zice = id.variables['zice'][:, :]
    id.close()
    num_lat = size(roms_lat, 0)
    num_lon = size(roms_lon, 1)
    # Get the integrands on the 3D grid
    roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
    # Get the volume integrand
    dV = roms_dx * roms_dy * roms_dz
    # Read ROMS output
    id = Dataset(roms_file, 'r')
    roms_temp = id.variables['temp'][0, :, :, :]
    roms_salt = id.variables['salt'][0, :, :, :]
    id.close()
    # Initialise the volume of each water mass in each sector
    roms_vol_watermass = zeros([num_watermasses, num_sectors])
    # Calculate the water mass breakdown
    for j in range(num_lat):
        for i in range(num_lon):
            # Select ice shelf points
            if roms_zice[j, i] < 0:
                # Figure out which sector this point falls into
                lon = roms_lon[j, i]
                if lon > 180:
                    lon -= 360
                lat = roms_lat[j, i]
                if lon >= -85 and lon < -30 and lat < -74:
                    # Filchner-Ronne
                    sector = 0
                elif lon >= -30 and lon < 65:
                    # Eastern Weddell region
                    sector = 1
                elif lon >= 65 and lon < 76:
                    # Amery
                    sector = 2
                elif lon >= 76 and lon < 165 and lat >= -74:
                    # Australian sector
                    sector = 3
                elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                    # Ross Sea
                    sector = 4
                elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                    # Amundsen Sea
                    sector = 5
                elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                    # Bellingshausen Sea
                    sector = 6
                elif lon >= -66 and lon < -59 and lat >= -74:
                    # Larsen Ice Shelves
                    sector = 7
                else:
                    print 'No region found for lon=', str(lon), ', lat=', str(lat)
                    break  # return
                # Loop downward
                for k in range(N):
                    curr_temp = roms_temp[k, j, i]
                    curr_salt = roms_salt[k, j, i]
                    curr_volume = dV[k, j, i]
                    # Get the surface freezing point at this salinity
                    curr_tfrz = curr_salt / (-18.48 + 18.48 / 1e3 * curr_salt)
                    # Figure out which water mass this is
                    if curr_temp < curr_tfrz:
                        # ISW
                        wm_key = 0
                    elif curr_salt < 34:
                        # AASW
                        wm_key = 1
                    elif curr_temp > 0:
                        # CDW
                        wm_key = 2
                    elif curr_temp > -1:
                        # MCDW
                        wm_key = 3
                    elif curr_salt < 34.5:
                        # WW
                        wm_key = 4
                    else:
                        # HSSW
                        wm_key = 5
                    # Integrate the volume for the right water mass and sector
                    roms_vol_watermass[wm_key, sector] += curr_volume
                    # Also integrate total Antarctica
                    roms_vol_watermass[wm_key, -1] += curr_volume
    # Find the total volume of each sector by adding up the volume of each
    # water mass
    roms_vol_sectors = sum(roms_vol_watermass, axis=0)
    # Calculate the percentage of each water mass in each sector
    roms_percent_watermass = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            roms_percent_watermass[wm_key, sector] = roms_vol_watermass[wm_key, sector] / roms_vol_sectors[sector] * 100

    print 'Processing low-res FESOM'
    # Build the mesh
    elements_lr = fesom_grid(fesom_mesh_lr, circumpolar, cross_180)
    id = Dataset(fesom_file_lr, 'r')
    temp_nodes_lr = id.variables['temp'][0, :]
    salt_nodes_lr = id.variables['salt'][0, :]
    id.close()
    fesom_vol_watermass_lr = zeros([num_watermasses, num_sectors])
    for i in range(len(elements_lr)):
        elm = elements_lr[i]
        if elm.cavity:
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                sector = 0
            elif lon >= -30 and lon < 65:
                sector = 1
            elif lon >= 65 and lon < 76:
                sector = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                sector = 3
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                sector = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                sector = 5
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                sector = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                sector = 7
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  # return
            # Get the area of the 2D element
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    # Reached the bottom
                    break
                # Calculate the average temperature, salinity, and layer
                # thickness for this 3D triangular prism
                temp_vals = []
                salt_vals = []
                dz_vals = []
                for n in range(3):
                    temp_vals.append(temp_nodes_lr[nodes[n].id])
                    salt_vals.append(salt_nodes_lr[nodes[n].id])
                    temp_vals.append(temp_nodes_lr[nodes[n].below.id])
                    salt_vals.append(salt_nodes_lr[nodes[n].below.id])
                    dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                    # Get ready for the next iteration of the loop
                    nodes[n] = nodes[n].below
                curr_temp = mean(array(temp_vals))
                curr_salt = mean(array(salt_vals))
                curr_volume = area * mean(array(dz_vals))
                curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(curr_salt**3) - 2.155e-4 * curr_salt**2
                if curr_temp < curr_tfrz:
                    wm_key = 0
                elif curr_salt < 34:
                    wm_key = 1
                elif curr_temp > 0:
                    wm_key = 2
                elif curr_temp > -1:
                    wm_key = 3
                elif curr_salt < 34.5:
                    wm_key = 4
                else:
                    wm_key = 5
                fesom_vol_watermass_lr[wm_key, sector] += curr_volume
                fesom_vol_watermass_lr[wm_key, -1] += curr_volume
    fesom_vol_sectors_lr = sum(fesom_vol_watermass_lr, axis=0)
    fesom_percent_watermass_lr = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            fesom_percent_watermass_lr[wm_key, sector] = fesom_vol_watermass_lr[wm_key, sector] / fesom_vol_sectors_lr[sector] * 100

    print 'Processing high-res FESOM'
    elements_hr = fesom_grid(fesom_mesh_hr, circumpolar, cross_180)
    fesom_vol_watermass_hr = zeros([num_watermasses, num_sectors])
    id = Dataset(fesom_file_hr, 'r')
    temp_nodes_hr = id.variables['temp'][0, :]
    salt_nodes_hr = id.variables['salt'][0, :]
    id.close()
    for i in range(len(elements_hr)):
        elm = elements_hr[i]
        if elm.cavity:
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                sector = 0
            elif lon >= -30 and lon < 65:
                sector = 1
            elif lon >= 65 and lon < 76:
                sector = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                sector = 3
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                sector = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                sector = 5
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                sector = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                sector = 7
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  # return
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    break
                temp_vals = []
                salt_vals = []
                dz_vals = []
                for n in range(3):
                    temp_vals.append(temp_nodes_hr[nodes[n].id])
                    salt_vals.append(salt_nodes_hr[nodes[n].id])
                    temp_vals.append(temp_nodes_hr[nodes[n].below.id])
                    salt_vals.append(salt_nodes_hr[nodes[n].below.id])
                    dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                    nodes[n] = nodes[n].below
                curr_temp = mean(array(temp_vals))
                curr_salt = mean(array(salt_vals))
                curr_volume = area * mean(array(dz_vals))
                curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(curr_salt**3) - 2.155e-4 * curr_salt**2
                if curr_temp < curr_tfrz:
                    wm_key = 0
                elif curr_salt < 34:
                    wm_key = 1
                elif curr_temp > 0:
                    wm_key = 2
                elif curr_temp > -1:
                    wm_key = 3
                elif curr_salt < 34.5:
                    wm_key = 4
                else:
                    wm_key = 5
                fesom_vol_watermass_hr[wm_key, sector] += curr_volume
                fesom_vol_watermass_hr[wm_key, -1] += curr_volume
    fesom_vol_sectors_hr = sum(fesom_vol_watermass_hr, axis=0)
    fesom_percent_watermass_hr = zeros([num_watermasses, num_sectors])
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            fesom_percent_watermass_hr[wm_key, sector] = fesom_vol_watermass_hr[wm_key, sector] / fesom_vol_sectors_hr[sector] * 100

    # Print the results
    for sector in range(num_sectors):
        print sector_names[sector]
        print 'MetROMS:'
        for wm_key in range(num_watermasses):
            print str(roms_percent_watermass[wm_key, sector]) + '% ' + wm_names[wm_key]
        print 'FESOM low-res:'
        for wm_key in range(num_watermasses):
            print str(fesom_percent_watermass_lr[wm_key, sector]) + '% ' + wm_names[wm_key]
        print 'FESOM high-res:'
        for wm_key in range(num_watermasses):
            print str(fesom_percent_watermass_hr[wm_key, sector]) + '% ' + wm_names[wm_key]
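
# The lon/lat decision chain above is duplicated in several functions in this
# file; a shared helper with the same boundaries (a sketch, not part of the
# original scripts) would remove the repetition. Returns the sector index, or
# -1 if no region matches.
def sector_of(lon, lat):
    if lon > 180:
        lon -= 360
    if lon >= -85 and lon < -30 and lat < -74:
        return 0  # Filchner-Ronne
    elif lon >= -30 and lon < 65:
        return 1  # Eastern Weddell region
    elif lon >= 65 and lon < 76:
        return 2  # Amery
    elif lon >= 76 and lon < 165 and lat >= -74:
        return 3  # Australian sector
    elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
        return 4  # Ross Sea
    elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
        return 5  # Amundsen Sea
    elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
        return 6  # Bellingshausen Sea
    elif lon >= -66 and lon < -59 and lat >= -74:
        return 7  # Larsen Ice Shelves
    else:
        return -1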

def ts_animation(mesh_path, directory, start_year, end_year, fig_dir):

    # Northern boundary of water masses to consider
    nbdry = -50
    # Number of temperature and salinity bins
    num_bins = 1000
    # Plotting parameters
    circumpolar = False
    cross_180 = False
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 31.8
    max_salt = 35.2
    min_temp = -3
    max_temp = 12
    # Bounds on the volume log scale (pre-computed, change if needed)
    min_vol = 18
    max_vol = 33
    # Naming conventions for FESOM oce.mean.nc files
    file_head = 'MK44005.'
    file_tail = '.oce.mean.nc'

    # Calculate the boundaries of the temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate the centres of the temperature bins (for plotting)
    temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:])
    # Calculate the surface freezing point as a function of salinity: this is
    # the equation the FESOM sea ice code uses
    freezing_pt = -0.0575 * salt_centres + 1.7105e-3 * sqrt(salt_centres**3) - 2.155e-4 * salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate the potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres))) - 1000
    # Density contours to plot
    density_lev = arange(24.4, 28.4, 0.2)

    # Make the FESOM grid elements
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    # Loop over years
    for year in range(start_year, end_year + 1):
        print 'Processing ' + str(year)
        # Read temperature and salinity at each 3D node, annually averaged
        id = Dataset(directory + file_head + str(year) + file_tail, 'r')
        temp = mean(id.variables['temp'][:, :], axis=0)
        salt = mean(id.variables['salt'][:, :], axis=0)
        id.close()
        # Set up a 2D array of temperature bins x salinity bins to increment
        # with the volume of water masses
        ts_vals = zeros([size(temp_centres), size(salt_centres)])
        # Loop over elements
        for elm in elements:
            # See if we're in the region of interest
            if all(elm.lat < nbdry):
                # Get the area of the 2D triangle
                area = elm.area()
                nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
                # Loop downward
                while True:
                    if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                        # We've reached the bottom
                        break
                    # Calculate the average temperature, salinity, and layer
                    # thickness over this 3D triangular prism
                    temp_vals = []
                    salt_vals = []
                    dz = []
                    for i in range(3):
                        # Average temperature over the 6 nodes
                        temp_vals.append(temp[nodes[i].id])
                        temp_vals.append(temp[nodes[i].below.id])
                        # Average salinity over the 6 nodes
                        salt_vals.append(salt[nodes[i].id])
                        salt_vals.append(salt[nodes[i].below.id])
                        # Average dz over the 3 vertical edges
                        dz.append(abs(nodes[i].depth - nodes[i].below.depth))
                        # Get ready for the next repetition of the loop
                        nodes[i] = nodes[i].below
                    temp_elm = mean(array(temp_vals))
                    salt_elm = mean(array(salt_vals))
                    # Calculate the volume of the 3D triangular prism
                    volume = area * mean(array(dz))
                    # Figure out which bins this falls into
                    temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1
                    salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1
                    # Increment the bins with this volume
                    ts_vals[temp_index, salt_index] += volume
        # Mask bins with zero volume
        ts_vals = ma.masked_where(ts_vals == 0, ts_vals)

        # Plot
        fig = figure(figsize=(12, 12))
        # A log scale is more visible
        img = pcolor(salt_centres, temp_centres, log(ts_vals), vmin=min_vol, vmax=max_vol, cmap='jet')
        # Add the surface freezing point line
        plot(salt_centres, freezing_pt, color='black', linestyle='dashed')
        # Add density contours
        cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6, 0.6, 0.6), linestyles='dotted')
        # Label the density contours
        manual_locations = [(32, 11.4), (32.3, 11.4), (32.5, 11.4), (32.8, 11.4), (33.1, 11.4), (33.3, 11.4), (33.5, 11.3), (33.8, 11.3), (34.1, 11.3), (34.3, 11.3), (34.6, 11.3), (34.8, 11.4), (35, 10.8), (35, 9.9), (35, 8.1), (35, 7.5), (35, 6), (35, 4.4), (35, 2.6), (35.1, 0)]
        clabel(cs, inline=1, fontsize=12, color=(0.6, 0.6, 0.6), fmt='%1.1f', manual=manual_locations)
        xlim([min_salt, max_salt])
        ylim([min_temp, max_temp])
        xlabel('Salinity (psu)', fontsize=16)
        ylabel(r'Temperature ($^{\circ}$C)', fontsize=16)
        title('Water masses south of ' + str(-nbdry) + r'$^{\circ}$S, log(volume)', fontsize=24)
        colorbar(img)
        # Add the year in the bottom corner
        text(35.8, -4, str(year), fontsize=30)
        # Save the figure with the year in the filename
        fig.savefig(fig_dir + str(year) + '.png')
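
# The bin lookup in ts_animation (nonzero(bins > x)[0][0] - 1) assumes x lies
# strictly inside the precomputed bounds; a minimal demonstration on toy bins:
def demo_bin_lookup():
    bins = linspace(0, 10, num=11)  # edges 0, 1, ..., 10
    x = 3.7
    index = nonzero(bins > x)[0][0] - 1
    print 'Value ' + str(x) + ' falls in bin ' + str(index)  # prints bin 3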

def timeseries_watermass_meltpotential(mesh_path, output_path, start_year, end_year, log_file):

    # Titles for each sector
    sector_names = ['Filchner-Ronne Ice Shelf Cavity', 'Eastern Weddell Region Cavities', 'Amery Ice Shelf Cavity', 'Australian Sector Cavities', 'Ross Sea Cavities', 'Amundsen Sea Cavities', 'Bellingshausen Sea Cavities', 'Larsen Ice Shelf Cavities', 'All Ice Shelf Cavities']
    num_sectors = len(sector_names)
    # Water masses to consider
    wm_names = ['ISW', 'HSSW', 'LSSW', 'AASW', 'MCDW', 'CDW']
    num_watermasses = len(wm_names)
    # Only consider elements south of 30S
    circumpolar = True
    # Don't make second copies of elements that cross 180E
    cross_180 = False
    # Naming conventions for FESOM output files
    file_head = output_path + 'MK44005.'
    file_tail = '.oce.mean.nc'
    num_years = end_year - start_year + 1
    # Specific heat of seawater (J/K/kg)
    cpw = 4180
    # Coefficients for the in-situ freezing point calculation
    a = -0.0575   # Salinity dependence (K/psu)
    b = 0.0901    # Surface freezing point at 0 salinity (C)
    c = 7.61e-4   # Depth dependence (K/m)

    print 'Building mesh'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    print 'Categorising elements into sectors'
    location_flag = zeros([num_sectors, len(elements)])
    for i in range(len(elements)):
        elm = elements[i]
        # Make sure we're actually in an ice shelf cavity
        if elm.cavity:
            # Figure out which sector this ice shelf element falls into
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                # Filchner-Ronne
                location_flag[0, i] = 1
            elif lon >= -30 and lon < 65:
                # Eastern Weddell region
                location_flag[1, i] = 1
            elif lon >= 65 and lon < 76:
                # Amery
                location_flag[2, i] = 1
            elif lon >= 76 and lon < 165 and lat >= -74:
                # Australian sector
                location_flag[3, i] = 1
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                # Ross Sea
                location_flag[4, i] = 1
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                # Amundsen Sea
                location_flag[5, i] = 1
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                # Bellingshausen Sea
                location_flag[6, i] = 1
            elif lon >= -66 and lon < -59 and lat >= -74:
                # Larsen Ice Shelves
                location_flag[7, i] = 1
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  # return
            # All ice shelf elements are in Total Antarctica
            location_flag[8, i] = 1

    print 'Calculating melt potential'
    mp = zeros([num_watermasses, num_sectors, num_years])
    for year in range(start_year, end_year + 1):
        print 'Processing ' + str(year)
        # Read temperature and salinity
        id = Dataset(file_head + str(year) + file_tail, 'r')
        temp = mean(id.variables['temp'][:, :], axis=0)
        salt = mean(id.variables['salt'][:, :], axis=0)
        id.close()
        # Loop over elements
        for i in range(len(elements)):
            elm = elements[i]
            # Check if we're in an ice shelf cavity
            if elm.cavity:
                # Get the area of the 2D element
                area = elm.area()
                nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
                # Loop downward
                while True:
                    if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                        # Reached the bottom
                        break
                    # Calculate the average temperature, salinity, depth, and
                    # layer thickness for this 3D triangular prism
                    temp_vals = []
                    salt_vals = []
                    z_vals = []
                    dz_vals = []
                    for n in range(3):
                        temp_vals.append(temp[nodes[n].id])
                        salt_vals.append(salt[nodes[n].id])
                        z_vals.append(nodes[n].depth)
                        temp_vals.append(temp[nodes[n].below.id])
                        salt_vals.append(salt[nodes[n].below.id])
                        z_vals.append(nodes[n].below.depth)
                        dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                        # Get ready for the next iteration of the loop
                        nodes[n] = nodes[n].below
                    curr_temp = mean(array(temp_vals))
                    curr_salt = mean(array(salt_vals))
                    curr_z = mean(array(z_vals))
                    curr_volume = area * mean(array(dz_vals))
                    # Get the surface freezing point at this salinity
                    curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(curr_salt**3) - 2.155e-4 * curr_salt**2
                    # Figure out which water mass this is
                    if curr_temp < curr_tfrz:
                        # ISW
                        wm_key = 0
                    elif curr_salt < 34:
                        # AASW
                        wm_key = 3
                    elif curr_temp > 0:
                        # CDW
                        wm_key = 5
                    elif curr_temp > -1.5:
                        # MCDW
                        wm_key = 4
                    elif curr_salt < 34.5:
                        # LSSW
                        wm_key = 2
                    else:
                        # HSSW
                        wm_key = 1
                    # Integrate the melt potential
                    # First we need the (potential) density
                    curr_rho = unesco(curr_temp, curr_salt, 0)
                    # And the in-situ freezing point
                    curr_tfrz_insitu = a * curr_salt + b + c * (-1 * curr_z)
                    curr_sectors = 0
                    for sector in range(num_sectors):
                        if location_flag[sector, i] == 1:
                            curr_sectors += 1
                            mp[wm_key, sector, year - start_year] += (curr_temp - curr_tfrz_insitu) * curr_volume * cpw * curr_rho
                    # Should be in exactly 2 sectors (1 + total Antarctica)
                    if curr_sectors != 2:
                        print 'Wrong number of sectors for element ' + str(i)

    print 'Saving results to log file'
    f = open(log_file, 'w')
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            f.write('Melt potential of ' + wm_names[wm_key] + ' in ' + sector_names[sector] + ' (J)\n')
            for t in range(num_years):
                f.write(str(mp[wm_key, sector, t]) + '\n')
    f.close()
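
# The water mass decision chain above also appears in
# timeseries_watermass_temp_salt below, with the same ordering
# (ISW, HSSW, LSSW, AASW, MCDW, CDW) and the -1.5 C MCDW cutoff; a shared
# helper (a sketch only) could be:
def watermass_key(temp, salt):
    # Surface freezing point at this salinity (FESOM sea ice formula)
    tfrz = -0.0575 * salt + 1.7105e-3 * sqrt(salt**3) - 2.155e-4 * salt**2
    if temp < tfrz:
        return 0  # ISW
    elif salt < 34:
        return 3  # AASW
    elif temp > 0:
        return 5  # CDW
    elif temp > -1.5:
        return 4  # MCDW
    elif salt < 34.5:
        return 2  # LSSW
    else:
        return 1  # HSSW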

def zonal_ts_before_after_ross_2094():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    file_beg = '/short/y99/kaa561/FESOM/highres_spinup/annual_avg.oce.mean.1996.2005.nc'
    file_end = '/short/y99/kaa561/FESOM/rcp85_A/output/MK44005.2094.oce.mean.nc'
    lon0 = -159
    lat_min = -85
    lat_max = -73

    print 'Building FESOM mesh'
    elm2D = fesom_grid(mesh_path)

    print 'Reading temperature and salinity data'
    id = Dataset(file_beg, 'r')
    temp_nodes_beg = id.variables['temp'][0, :]
    salt_nodes_beg = id.variables['salt'][0, :]
    id.close()
    # Annually average 2094
    id = Dataset(file_end, 'r')
    temp_nodes_end = mean(id.variables['temp'][:, :], axis=0)
    salt_nodes_end = mean(id.variables['salt'][:, :], axis=0)
    id.close()

    print 'Interpolating to ' + str(lon0)
    # Build arrays of SideElements making up the zonal slices
    # Start with the beginning
    selements_temp_beg = fesom_sidegrid(elm2D, temp_nodes_beg, lon0, lat_max)
    selements_salt_beg = fesom_sidegrid(elm2D, salt_nodes_beg, lon0, lat_max)
    # Build an array of quadrilateral patches for the plots, and data values
    # corresponding to each SideElement
    patches = []
    temp_beg = []
    for selm in selements_temp_beg:
        # Make the patch
        coord = transpose(vstack((selm.y, selm.z)))
        patches.append(Polygon(coord, True, linewidth=0.))
        # Save the data value
        temp_beg.append(selm.var)
    temp_beg = array(temp_beg)
    # Salinity has the same patches but different values
    salt_beg = []
    for selm in selements_salt_beg:
        salt_beg.append(selm.var)
    salt_beg = array(salt_beg)
    # Repeat for the end
    selements_temp_end = fesom_sidegrid(elm2D, temp_nodes_end, lon0, lat_max)
    selements_salt_end = fesom_sidegrid(elm2D, salt_nodes_end, lon0, lat_max)
    temp_end = []
    for selm in selements_temp_end:
        temp_end.append(selm.var)
    temp_end = array(temp_end)
    salt_end = []
    for selm in selements_salt_end:
        salt_end.append(selm.var)
    salt_end = array(salt_end)
    # Find the bounds on each variable
    temp_min = min(amin(temp_beg), amin(temp_end))
    temp_max = max(amax(temp_beg), amax(temp_end))
    salt_min = min(amin(salt_beg), amin(salt_end))
    salt_max = max(amax(salt_beg), amax(salt_end))
    # Find the deepest depth
    # Start with 0
    depth_min = 0
    # Modify with the patches
    for selm in selements_temp_beg:
        depth_min = min(depth_min, amin(selm.z))
    # Round down to the nearest 50 metres
    depth_min = floor(depth_min / 50) * 50

    print 'Plotting'
    fig = figure(figsize=(16, 10))
    # Temperature
    gs_temp = GridSpec(1, 2)
    gs_temp.update(left=0.11, right=0.9, bottom=0.5, top=0.9, wspace=0.05, hspace=0.5)
    # Beginning
    ax = subplot(gs_temp[0, 0])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(temp_beg)
    img.set_edgecolor('face')
    img.set_clim(vmin=temp_min, vmax=temp_max)
    ax.add_collection(img)
    xlim([lat_min, lat_max])
    ylim([depth_min, 0])
    title(r'Temperature ($^{\circ}$C), 1996-2005', fontsize=24)
    ax.set_xticklabels([])
    ylabel('Depth (m)', fontsize=18)
    # End
    ax = subplot(gs_temp[0, 1])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(temp_end)
    img.set_edgecolor('face')
    img.set_clim(vmin=temp_min, vmax=temp_max)
    ax.add_collection(img)
    xlim([lat_min, lat_max])
    ylim([depth_min, 0])
    title(r'Temperature ($^{\circ}$C), 2094', fontsize=24)
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    # Add a colourbar on the right
    cbaxes = fig.add_axes([0.92, 0.55, 0.02, 0.3])
    cbar = colorbar(img, cax=cbaxes, extend='both')
    cbar.ax.tick_params(labelsize=16)
    # Salinity
    gs_salt = GridSpec(1, 2)
    gs_salt.update(left=0.11, right=0.9, bottom=0.05, top=0.45, wspace=0.05, hspace=0.5)
    # Beginning
    ax = subplot(gs_salt[0, 0])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(salt_beg)
    img.set_edgecolor('face')
    img.set_clim(vmin=salt_min, vmax=salt_max)
    ax.add_collection(img)
    xlim([lat_min, lat_max])
    ylim([depth_min, 0])
    title('Salinity (psu), 1996-2005', fontsize=24)
    xlabel('Latitude', fontsize=18)
    ylabel('Depth (m)', fontsize=18)
    # End
    ax = subplot(gs_salt[0, 1])
    img = PatchCollection(patches, cmap='jet')
    img.set_array(salt_end)
    img.set_edgecolor('face')
    img.set_clim(vmin=salt_min, vmax=salt_max)
    ax.add_collection(img)
    xlim([lat_min, lat_max])
    ylim([depth_min, 0])
    title('Salinity (psu), 2094', fontsize=24)
    xlabel('Latitude', fontsize=18)
    ax.set_yticklabels([])
    # Add a colourbar on the right
    cbaxes = fig.add_axes([0.92, 0.1, 0.02, 0.3])
    cbar = colorbar(img, cax=cbaxes, extend='both')
    cbar.ax.tick_params(labelsize=16)
    # Main title
    suptitle(r'RCP 8.5 A, 159$^{\circ}$W', fontsize=28)
    fig.show()
    fig.savefig('159W_rcp85_A_2094.png')
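
# Minimal standalone sketch of the PatchCollection idiom used above, with one
# synthetic quadrilateral (assumes only the matplotlib imports at the top of
# this file).
def demo_patch_plot():
    coord = array([[0.0, 0.0], [1.0, 0.0], [1.0, -100.0], [0.0, -100.0]])
    patches = [Polygon(coord, True, linewidth=0.)]
    fig = figure()
    ax = fig.add_subplot(1, 1, 1)
    img = PatchCollection(patches, cmap='jet')
    # One data value per patch, mapped through the colour map
    img.set_array(array([1.0]))
    ax.add_collection(img)
    xlim([0, 1])
    ylim([-100, 0])
    fig.savefig('demo_patches.png')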

# Command-line interface
if __name__ == "__main__":

    mesh_path = raw_input("Path to FESOM mesh directory: ")
    file_path = raw_input("Path to FESOM oce.mean.nc file: ")
    tstep = int(raw_input("Time index to plot (starting at 1): "))
    lon0 = float(raw_input("Longitude in degrees (-180 to 180): "))
    depth_min = -1 * float(raw_input("Deepest depth to plot (positive, metres): "))
    action = raw_input("Save figure (s) or display in window (d)? ")
    if action == 's':
        save = True
        fig_name = raw_input("File name for figure: ")
    elif action == 'd':
        save = False
        fig_name = None
    elm2D = fesom_grid(mesh_path)
    temp_salt_slice(elm2D, file_path, tstep, lon0, depth_min, save, fig_name)

    # Repeat until the user is finished
    while True:
        repeat = raw_input("Make another plot (y/n)? ")
        if repeat == 'y':
            update_mesh = False
            while True:
                changes = raw_input("Enter a parameter to change: (1) mesh path, (2) file path, (3) timestep, (4) longitude, (5) deepest depth, (6) save/display; or enter to continue: ")
                if len(changes) == 0:
                    break
                else:
                    if int(changes) == 1:
                        update_mesh = True
                        mesh_path = raw_input("Path to FESOM mesh directory: ")
                    # Options 2-6 follow the same pattern as the initial
                    # prompts (a sketch of the remaining branches)
                    elif int(changes) == 2:
                        file_path = raw_input("Path to FESOM oce.mean.nc file: ")
                    elif int(changes) == 3:
                        tstep = int(raw_input("Time index to plot (starting at 1): "))
                    elif int(changes) == 4:
                        lon0 = float(raw_input("Longitude in degrees (-180 to 180): "))
                    elif int(changes) == 5:
                        depth_min = -1 * float(raw_input("Deepest depth to plot (positive, metres): "))
                    elif int(changes) == 6:
                        action = raw_input("Save figure (s) or display in window (d)? ")
                        if action == 's':
                            save = True
                            fig_name = raw_input("File name for figure: ")
                        elif action == 'd':
                            save = False
                            fig_name = None
            # Rebuild the mesh if its path changed, then make the new plot
            if update_mesh:
                elm2D = fesom_grid(mesh_path)
            temp_salt_slice(elm2D, file_path, tstep, lon0, depth_min, save, fig_name)
        else:
            break

def massloss_percent_winds():

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/'
    directory_winds = '/short/y99/kaa561/FESOM/rcp85_M/'
    directory_nowinds = '/short/y99/kaa561/FESOM/rcp85_M_no_wind_anom/'
    file_name = 'annual_avg.forcing.diag.2091.2100.nc'
    # Seconds per year
    sec_per_year = 365.25 * 24 * 3600
    # Density of ice in kg/m^3
    rho_ice = 916
    # Sectors to split Antarctica into
    sector_names = ['Filchner-Ronne Ice Shelf', 'Eastern Weddell Region', 'Amery Ice Shelf', 'Australian Sector', 'Ross Sea', 'Amundsen Sea', 'Bellingshausen Sea', 'Larsen Ice Shelves']
    # Number of sectors
    num_sectors = len(sector_names)

    print 'Building mesh'
    elements = fesom_grid(mesh_path, circumpolar=True, cross_180=False)

    print 'Reading data'
    id = Dataset(directory_winds + file_name, 'r')
    ismr_nodes_winds = id.variables['wnet'][0, :] * sec_per_year
    id.close()
    id = Dataset(directory_nowinds + file_name, 'r')
    ismr_nodes_nowinds = id.variables['wnet'][0, :] * sec_per_year
    id.close()
    # Average over elements in ice shelf cavities
    ismr_elm_winds = []
    ismr_elm_nowinds = []
    for elm in elements:
        if elm.cavity:
            ismr_elm_winds.append(mean([ismr_nodes_winds[elm.nodes[0].id], ismr_nodes_winds[elm.nodes[1].id], ismr_nodes_winds[elm.nodes[2].id]]))
            ismr_elm_nowinds.append(mean([ismr_nodes_nowinds[elm.nodes[0].id], ismr_nodes_nowinds[elm.nodes[1].id], ismr_nodes_nowinds[elm.nodes[2].id]]))
    ismr_elm_winds = array(ismr_elm_winds)
    ismr_elm_nowinds = array(ismr_elm_nowinds)

    print 'Integrating mass loss'
    total_massloss_winds = zeros(num_sectors)
    total_massloss_nowinds = zeros(num_sectors)
    # Loop over elements
    i = 0
    for elm in elements:
        if elm.cavity:
            # Figure out which sector this ice shelf element falls into
            # First get the average lon and lat across the 3 Nodes
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                # Filchner-Ronne
                index = 0
            elif lon >= -30 and lon < 65:
                # Eastern Weddell region
                index = 1
            elif lon >= 65 and lon < 76:
                # Amery
                index = 2
            elif lon >= 76 and lon < 165 and lat >= -74:
                # Australian sector
                index = 3
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                # Ross Sea
                index = 4
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                # Amundsen Sea
                index = 5
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                # Bellingshausen Sea
                index = 6
            elif lon >= -66 and lon < -59 and lat >= -74:
                # Larsen Ice Shelves
                index = 7
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  # return
            # Integrate the total mass loss in this sector
            total_massloss_winds[index] += ismr_elm_winds[i] * elm.area() * rho_ice * 1e-12
            total_massloss_nowinds[index] += ismr_elm_nowinds[i] * elm.area() * rho_ice * 1e-12
            i += 1

    # Calculate the change in mass loss due to removing winds
    for index in range(num_sectors):
        massloss_winds = total_massloss_winds[index]
        massloss_nowinds = total_massloss_nowinds[index]
        percent_change = (massloss_nowinds - massloss_winds) / massloss_winds * 1e2
        print sector_names[index] + ': ' + str(percent_change) + '%'
    # Total Antarctica
    massloss_winds = sum(total_massloss_winds)
    massloss_nowinds = sum(total_massloss_nowinds)
    percent_change = (massloss_nowinds - massloss_winds) / massloss_winds * 1e2
    print 'Total Antarctica: ' + str(percent_change) + '%'

def timeseries_amundsen(mesh_path, ice_diag_file, log_file):

    # Mesh parameters
    circumpolar = True
    cross_180 = False
    # Number of days for each output step
    days_per_output = 5
    # Bounds on the Amundsen Sea box
    lon_min = -115
    lon_max = -100
    lat_max = -71

    avg_ice2ocn = []
    # Check if the log file exists
    if exists(log_file):
        print 'Reading previously calculated values'
        f = open(log_file, 'r')
        # Skip the first line (header)
        f.readline()
        for line in f:
            avg_ice2ocn.append(float(line))
        f.close()

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)
    num_elm2D = len(elements)

    print 'Reading data'
    # Change the sign on the ice growth rate in m/s, and multiply by 1e8 for
    # visibility
    id = Dataset(ice_diag_file, 'r')
    ice2ocn = -1e8 * id.variables['thdgr'][:, :]
    id.close()
    num_time = size(ice2ocn, 0)

    print 'Setting up arrays'
    # Location flag for non-cavity elements in the Amundsen Sea
    location_flag = zeros(num_elm2D)
    # Area of each element in the Amundsen Sea
    area_elm = zeros(num_elm2D)
    # Ice-to-ocean freshwater flux timeseries at each element
    ice2ocn_elm = zeros([num_time, num_elm2D])
    # Loop over each element to fill these in
    for i in range(num_elm2D):
        elm = elements[i]
        # Ignore ice shelf cavities
        if not elm.cavity:
            # Check if we're within the given lon and lat bounds
            if all(elm.lon >= lon_min) and all(elm.lon <= lon_max) and all(elm.lat <= lat_max):
                # Save the area
                area_elm[i] = elm.area()
                # Set the location flag
                location_flag[i] = 1
                # Average the ice-ocean freshwater flux timeseries over the 3
                # component nodes
                ice2ocn_elm[:, i] = (ice2ocn[:, elm.nodes[0].id] + ice2ocn[:, elm.nodes[1].id] + ice2ocn[:, elm.nodes[2].id]) / 3.0

    print 'Building timeseries'
    for t in range(num_time):
        # Average over the area of the correct elements
        avg_ice2ocn.append(sum(ice2ocn_elm[t, :] * area_elm * location_flag) / sum(area_elm * location_flag))

    print 'Saving results to log file'
    f = open(log_file, 'w')
    f.write('Average ice-to-ocean freshwater flux (1e-8 m/s): \n')
    for val in avg_ice2ocn:
        f.write(str(val) + '\n')
    f.close()
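
# Hedged usage sketch for the function above; the paths are placeholders, not
# the run directories used elsewhere in this file. Because the log file is
# re-read on each call, successive calls extend the same timeseries.
def example_timeseries_amundsen():
    timeseries_amundsen('/path/to/mesh/', '/path/to/MK44005.1992.ice.diag.nc', 'amundsen_ice2ocn.log')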

def zonal_cavity_ts_res():

    # Paths to the mesh directories
    mesh_path_low = '../FESOM/mesh/low_res/'
    mesh_path_high = '../FESOM/mesh/high_res/'
    # Paths to the output files
    output_path_low = '../FESOM/lowres_spinup/rep3/'
    output_path_high = '../FESOM/highres_spinup/rep3/'
    file_name = 'annual_avg.oce.mean.nc'
    # Name of each ice shelf
    shelf_names = ['Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf']
    # Beginnings of the filenames for the figures
    fig_heads = ['larsen_d', 'larsen_c', 'wilkins_georgevi_stange', 'ronne_filchner', 'abbot', 'pig', 'thwaites', 'dotson', 'getz', 'nickerson', 'sulzberger', 'mertz', 'totten_moscowuni', 'shackleton', 'west', 'amery', 'prince_harald', 'baudouin_borchgrevink', 'lazarev', 'nivl', 'fimbul_jelbart_ekstrom', 'brunt_riiser_larsen', 'ross']
    # Longitudes intersecting each ice shelf
    lon0 = [-60, -62, -68, -55, -93, -101, -106, -113, -120, -145, -150, 145, 116, 96, 85, 71, 36, 25, 15, 11, -1, -20, 180]
    # Latitude bounds for each ice shelf
    lat_min = [-73.1, -69.35, -73.1, -82.6, -73.28, -75.4, -75.5, -75, -74.9, -75.9, -77.8, -67.7, -67.17, -66.67, -67.25, -72, -69.7, -71, -70.4, -70.75, -71.83, -75.6, -84.6]
    lat_max = [-72, -66.13, -70, -75.5, -72.3, -74.4, -74.67, -74, -73.5, -75.3, -76.41, -67, -66.5, -64.83, -66.25, -68.5, -68.7, -69.9, -69.33, -69.83, -69.33, -72.9, -77]
    num_shelves = len(shelf_names)

    print 'Building FESOM mesh'
    elm2D_low = fesom_grid(mesh_path_low)
    elm2D_high = fesom_grid(mesh_path_high)

    print 'Reading temperature and salinity data'
    id = Dataset(output_path_low + file_name, 'r')
    temp_nodes_low = id.variables['temp'][0, :]
    salt_nodes_low = id.variables['salt'][0, :]
    id.close()
    id = Dataset(output_path_high + file_name, 'r')
    temp_nodes_high = id.variables['temp'][0, :]
    salt_nodes_high = id.variables['salt'][0, :]
    id.close()

    # Loop over the ice shelves
    for index in range(num_shelves):
        print 'Processing ' + shelf_names[index]
        # Figure out what to write in the title about the longitude
        if lon0[index] < 0:
            lon_string = ' (' + str(-lon0[index]) + r'$^{\circ}$W)'
        else:
            lon_string = ' (' + str(lon0[index]) + r'$^{\circ}$E)'
        # Build arrays of SideElements making up the zonal slices
        selements_temp_low = fesom_sidegrid(elm2D_low, temp_nodes_low, lon0[index], lat_max[index])
        selements_salt_low = fesom_sidegrid(elm2D_low, salt_nodes_low, lon0[index], lat_max[index])
        selements_temp_high = fesom_sidegrid(elm2D_high, temp_nodes_high, lon0[index], lat_max[index])
        selements_salt_high = fesom_sidegrid(elm2D_high, salt_nodes_high, lon0[index], lat_max[index])
        # Build an array of quadrilateral patches for the plots, and data
        # values corresponding to each SideElement
        patches_low = []
        temp_low = []
        for selm in selements_temp_low:
            # Make the patch
            coord = transpose(vstack((selm.y, selm.z)))
            patches_low.append(Polygon(coord, True, linewidth=0.))
            # Save the data value
            temp_low.append(selm.var)
        temp_low = array(temp_low)
        # Salinity has the same patches but different values
        salt_low = []
        for selm in selements_salt_low:
            salt_low.append(selm.var)
        salt_low = array(salt_low)
        # Repeat for high-res
        patches_high = []
        temp_high = []
        for selm in selements_temp_high:
            coord = transpose(vstack((selm.y, selm.z)))
            patches_high.append(Polygon(coord, True, linewidth=0.))
            temp_high.append(selm.var)
        temp_high = array(temp_high)
        salt_high = []
        for selm in selements_salt_high:
            salt_high.append(selm.var)
        salt_high = array(salt_high)
        # Find the bounds on each variable
        temp_min = min(amin(temp_low), amin(temp_high))
        temp_max = max(amax(temp_low), amax(temp_high))
        salt_min = min(amin(salt_low), amin(salt_high))
        salt_max = max(amax(salt_low), amax(salt_high))
        # Find the deepest depth
        # Start with 0
        depth_min = 0
        # Modify with the low-res patches
        for selm in selements_temp_low:
            depth_min = min(depth_min, amin(selm.z))
        # Modify with the high-res patches
        for selm in selements_temp_high:
            depth_min = min(depth_min, amin(selm.z))
        # Round down to the nearest 50 metres
        depth_min = floor(depth_min / 50) * 50
        # Plot
        fig = figure(figsize=(18, 12))
        # Low-res temperature
        ax = fig.add_subplot(2, 2, 1)
        img1 = PatchCollection(patches_low, cmap='jet')
        img1.set_array(temp_low)
        img1.set_edgecolor('face')
        img1.set_clim(vmin=temp_min, vmax=temp_max)
        ax.add_collection(img1)
        xlim([lat_min[index], lat_max[index]])
        ylim([depth_min, 0])
        title(r'Low-res temperature ($^{\circ}$C)', fontsize=20)
        ylabel('Depth (m)', fontsize=16)
        # High-res temperature
        ax = fig.add_subplot(2, 2, 2)
        img2 = PatchCollection(patches_high, cmap='jet')
        img2.set_array(temp_high)
        img2.set_edgecolor('face')
        img2.set_clim(vmin=temp_min, vmax=temp_max)
        ax.add_collection(img2)
        xlim([lat_min[index], lat_max[index]])
        ylim([depth_min, 0])
        title(r'High-res temperature ($^{\circ}$C)', fontsize=20)
        # Add a colourbar for temperature
        cbaxes_temp = fig.add_axes([0.92, 0.575, 0.01, 0.3])
        cbar_temp = colorbar(img2, cax=cbaxes_temp)
        cbar_temp.ax.tick_params(labelsize=16)
        # Low-res salinity
        ax = fig.add_subplot(2, 2, 3)
        img3 = PatchCollection(patches_low, cmap='jet')
        img3.set_array(salt_low)
        img3.set_edgecolor('face')
        img3.set_clim(vmin=salt_min, vmax=salt_max)
        ax.add_collection(img3)
        xlim([lat_min[index], lat_max[index]])
        ylim([depth_min, 0])
        title('Low-res salinity (psu)', fontsize=20)
        xlabel('Latitude', fontsize=16)
        ylabel('Depth (m)', fontsize=16)
        # High-res salinity
        ax = fig.add_subplot(2, 2, 4)
        img4 = PatchCollection(patches_high, cmap='jet')
        img4.set_array(salt_high)
        img4.set_edgecolor('face')
        img4.set_clim(vmin=salt_min, vmax=salt_max)
        ax.add_collection(img4)
        xlim([lat_min[index], lat_max[index]])
        ylim([depth_min, 0])
        title('High-res salinity (psu)', fontsize=20)
        xlabel('Latitude', fontsize=16)
        # Add a colourbar for salinity
        cbaxes_salt = fig.add_axes([0.92, 0.125, 0.01, 0.3])
        cbar_salt = colorbar(img4, cax=cbaxes_salt)
        cbar_salt.ax.tick_params(labelsize=16)
        # Main title
        suptitle(shelf_names[index] + lon_string, fontsize=28)
        #fig.show()
        fig.savefig(fig_heads[index] + '_zonal_ts.png')

def mip_seaice_tamura():

    # File paths
    # ROMS grid (just for bathymetry)
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    # FESOM mesh paths
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    # CICE 1992-2013 mean ice production (precomputed in calc_ice_prod.py)
    cice_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/ice_prod_1992_2013.nc'
    # FESOM 1992-2013 mean ice production (precomputed in
    # calc_annual_ice_prod.py in fesomtools)
    fesom_lr_file = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/ice_prod_1992_2013.nc'
    fesom_hr_file = '/short/y99/kaa561/FESOM/intercomparison_highres/output/ice_prod_1992_2013.nc'
    # Tamura's 1992-2013 mean ice production (precomputed on desktop with
    # Matlab)
    tamura_file = '/short/m68/kaa561/tamura_1992_2013_monthly_climatology.nc'
    # Output ASCII file
    output_file = 'seaice_prod_bins.log'
    # Size of the longitude bins
    dlon_bin = 1.0
    # Definition of the continental shelf: everywhere south of lat0 with
    # bathymetry shallower than h0
    lat0 = -60
    h0 = 1500
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi / 180.0

    # Set up the longitude bins
    bin_edges = arange(-180, 180 + dlon_bin, dlon_bin)
    bin_centres = 0.5 * (bin_edges[:-1] + bin_edges[1:])
    num_bins = len(bin_centres)

    print 'Processing MetROMS'
    # Read the CICE grid
    id = Dataset(cice_file, 'r')
    cice_lon = id.variables['TLON'][:, :]
    cice_lat = id.variables['TLAT'][:, :]
    # Read sea ice production
    cice_data = id.variables['ice_prod'][:, :]
    id.close()
    # Get the area integrands
    dx, dy = cartesian_grid_2d(cice_lon, cice_lat)
    dA = dx * dy
    # Make sure longitude is in the range [-180, 180]
    index = cice_lon > 180
    cice_lon[index] = cice_lon[index] - 360
    # Read the bathymetry (from the ROMS grid file) and trim it to the CICE
    # grid
    id = Dataset(roms_grid, 'r')
    cice_bathy = id.variables['h'][1:-1, 1:-1]
    id.close()
    # Set up the integral
    cice_data_bins = zeros(num_bins)
    # Loop over all cells
    num_lon = size(cice_lon, 1)
    num_lat = size(cice_lat, 0)
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for the land mask or ice shelves
            if cice_data[j, i] is ma.masked:
                continue
            # Check for the continental shelf
            if cice_lat[j, i] < lat0 and cice_bathy[j, i] < h0:
                # Find the right bin
                bin_index = nonzero(bin_edges > cice_lon[j, i])[0][0] - 1
                # Integrate (m^3/y)
                cice_data_bins[bin_index] += cice_data[j, i] * dA[j, i]
    # Convert to 10^9 m^3/y
    cice_data_bins *= 1e-9

    print 'Processing low-res FESOM'
    # Build the mesh
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True, cross_180=False)
    # Read sea ice production
    id = Dataset(fesom_lr_file, 'r')
    fesom_data_lr = id.variables['ice_prod'][:]
    id.close()
    # Set up the integral
    fesom_data_bins_lr = zeros(num_bins)
    # Loop over elements
    for elm in elements_lr:
        # Exclude ice shelf cavities
        if not elm.cavity:
            # Check for the continental shelf in 2 steps
            if all(elm.lat < lat0):
                elm_bathy = mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth])
                if elm_bathy < h0:
                    # Get the element-averaged sea ice production
                    elm_data = mean([fesom_data_lr[elm.nodes[0].id], fesom_data_lr[elm.nodes[1].id], fesom_data_lr[elm.nodes[2].id]])
                    # Find the right bin
                    elm_lon = mean(elm.lon)
                    if elm_lon < -180:
                        elm_lon += 360
                    elif elm_lon > 180:
                        elm_lon -= 360
                    bin_index = nonzero(bin_edges > elm_lon)[0][0] - 1
                    # Integrate (m^3/y)
                    fesom_data_bins_lr[bin_index] += elm_data * elm.area()
    # Convert to 10^9 m^3/y
    fesom_data_bins_lr *= 1e-9

    print 'Processing high-res FESOM'
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True, cross_180=False)
    id = Dataset(fesom_hr_file, 'r')
    fesom_data_hr = id.variables['ice_prod'][:]
    id.close()
    fesom_data_bins_hr = zeros(num_bins)
    for elm in elements_hr:
        if not elm.cavity:
            if all(elm.lat < lat0):
                elm_bathy = mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth])
                if elm_bathy < h0:
                    elm_data = mean([fesom_data_hr[elm.nodes[0].id], fesom_data_hr[elm.nodes[1].id], fesom_data_hr[elm.nodes[2].id]])
                    elm_lon = mean(elm.lon)
                    if elm_lon < -180:
                        elm_lon += 360
                    elif elm_lon > 180:
                        elm_lon -= 360
                    bin_index = nonzero(bin_edges > elm_lon)[0][0] - 1
                    fesom_data_bins_hr[bin_index] += elm_data * elm.area()
    fesom_data_bins_hr *= 1e-9

    print 'Processing Tamura obs'
    id = Dataset(tamura_file, 'r')
    # Read the grid and data
    tamura_lon = id.variables['longitude'][:, :]
    tamura_lat = id.variables['latitude'][:, :]
    # Read sea ice formation
    tamura_data = id.variables['ice_prod'][:, :]
    id.close()
    # Interpolate to a regular grid so we can easily integrate over area
    dlon_reg = 0.2
    dlat_reg = 0.1
    lon_reg_edges = arange(-180, 180 + dlon_reg, dlon_reg)
    lon_reg = 0.5 * (lon_reg_edges[:-1] + lon_reg_edges[1:])
    lat_reg_edges = arange(-80, -60 + dlat_reg, dlat_reg)
    lat_reg = 0.5 * (lat_reg_edges[:-1] + lat_reg_edges[1:])
    lon_reg_2d, lat_reg_2d = meshgrid(lon_reg, lat_reg)
    dx_reg = r * cos(lat_reg_2d * deg2rad) * dlon_reg * deg2rad
    dy_reg = r * dlat_reg * deg2rad
    dA_reg = dx_reg * dy_reg
    # Be careful with the periodic boundary here
    num_pts = size(tamura_lon)
    num_wrap1 = count_nonzero(tamura_lon < -179)
    num_wrap2 = count_nonzero(tamura_lon > 179)
    points = empty([num_pts + num_wrap1 + num_wrap2, 2])
    values = empty(num_pts + num_wrap1 + num_wrap2)
    points[:num_pts, 0] = ravel(tamura_lon)
    points[:num_pts, 1] = ravel(tamura_lat)
    values[:num_pts] = ravel(tamura_data)
    # Wrap the periodic boundary on both sides
    index = tamura_lon < -179
    points[num_pts:num_pts + num_wrap1, 0] = tamura_lon[index] + 360
    points[num_pts:num_pts + num_wrap1, 1] = tamura_lat[index]
    values[num_pts:num_pts + num_wrap1] = tamura_data[index]
    index = tamura_lon > 179
    points[num_pts + num_wrap1:, 0] = tamura_lon[index] - 360
    points[num_pts + num_wrap1:, 1] = tamura_lat[index]
    values[num_pts + num_wrap1:] = tamura_data[index]
    values = ma.masked_where(isnan(values), values)
    xi = empty([size(lon_reg_2d), 2])
    xi[:, 0] = ravel(lon_reg_2d)
    xi[:, 1] = ravel(lat_reg_2d)
    result = griddata(points, values, xi)
    tamura_data_reg = reshape(result, shape(lon_reg_2d))
    # Now regrid the MetROMS bathymetry to this regular grid
    num_pts = size(cice_lon)
    num_wrap1 = count_nonzero(cice_lon < -179)
    num_wrap2 = count_nonzero(cice_lon > 179)
    points = empty([num_pts + num_wrap1 + num_wrap2, 2])
    values = empty(num_pts + num_wrap1 + num_wrap2)
    points[:num_pts, 0] = ravel(cice_lon)
    points[:num_pts, 1] = ravel(cice_lat)
    values[:num_pts] = ravel(cice_bathy)
    index = cice_lon < -179
    points[num_pts:num_pts + num_wrap1, 0] = cice_lon[index] + 360
    points[num_pts:num_pts + num_wrap1, 1] = cice_lat[index]
    values[num_pts:num_pts + num_wrap1] = cice_bathy[index]
    index = cice_lon > 179
    points[num_pts + num_wrap1:, 0] = cice_lon[index] - 360
    points[num_pts + num_wrap1:, 1] = cice_lat[index]
    values[num_pts + num_wrap1:] = cice_bathy[index]
    values = ma.masked_where(isnan(values), values)
    xi = empty([size(lon_reg_2d), 2])
    xi[:, 0] = ravel(lon_reg_2d)
    xi[:, 1] = ravel(lat_reg_2d)
    result = griddata(points, values, xi)
    bathy_reg = reshape(result, shape(lon_reg_2d))
    # Mask everything but the continental shelf from dA_reg
    dA_reg = ma.masked_where(lat_reg_2d > lat0, dA_reg)
    dA_reg = ma.masked_where(bathy_reg > h0, dA_reg)
    # Mask the land mask (and ice shelves) from tamura_data_reg
    tamura_data_reg = ma.masked_where(isnan(tamura_data_reg), tamura_data_reg)
    # Set up the integral
    tamura_data_bins = zeros(num_bins)
    # Loop over longitude only
    for i in range(len(lon_reg)):
        # Find the right bin
        bin_index = nonzero(bin_edges > lon_reg[i])[0][0] - 1
        # Integrate (m^3/y)
        tamura_data_bins[bin_index] += sum(tamura_data_reg[:, i] * dA_reg[:, i])
    # Convert to 10^9 m^3/y
    tamura_data_bins *= 1e-9

    # Write the data to an ASCII file
    print 'Writing to file'
    f = open(output_file, 'w')
    f.write('Longitude:\n')
    for val in bin_centres:
        f.write(str(val) + '\n')
    f.write('MetROMS sea ice production (10^9 m^3/y):\n')
    for val in cice_data_bins:
        f.write(str(val) + '\n')
    f.write('FESOM (low-res) sea ice production (10^9 m^3/y):\n')
    for val in fesom_data_bins_lr:
        f.write(str(val) + '\n')
    f.write('FESOM (high-res) sea ice production (10^9 m^3/y):\n')
    for val in fesom_data_bins_hr:
        f.write(str(val) + '\n')
    f.write('Tamura sea ice production (10^9 m^3/y):\n')
    for val in tamura_data_bins:
        f.write(str(val) + '\n')
    f.close()
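
# The bin_index lookup used above is equivalent to numpy's searchsorted with
# side='right' on the bin edges; a quick self-check (sketch):
def demo_bin_equivalence():
    edges = arange(-180, 181, 1.0)
    for lon in [-179.5, 0.3, 179.9]:
        i1 = nonzero(edges > lon)[0][0] - 1
        i2 = searchsorted(edges, lon, side='right') - 1
        print lon, i1, i2  # the two indices agree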

def timeseries_watermass_temp_salt(mesh_path, output_path, start_year, end_year, log_file):

    # Titles for each sector
    sector_names = ['Filchner-Ronne Ice Shelf Cavity', 'Eastern Weddell Region Cavities', 'Amery Ice Shelf Cavity', 'Australian Sector Cavities', 'Ross Sea Cavities', 'Amundsen Sea Cavities', 'Bellingshausen Sea Cavities', 'Larsen Ice Shelf Cavities', 'All Ice Shelf Cavities']
    num_sectors = len(sector_names)
    # Water masses to consider
    wm_names = ['ISW', 'HSSW', 'LSSW', 'AASW', 'MCDW', 'CDW']
    num_watermasses = len(wm_names)
    # Only consider elements south of 30S
    circumpolar = True
    # Don't make second copies of elements that cross 180E
    cross_180 = False
    # Naming conventions for FESOM output files
    file_head = output_path + 'MK44005.'
    file_tail = '.oce.mean.nc'
    num_years = end_year - start_year + 1

    temp_watermass = zeros([num_watermasses, num_sectors, num_years])
    salt_watermass = zeros([num_watermasses, num_sectors, num_years])

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    print 'Categorising elements into sectors'
    location_flag = zeros([num_sectors, len(elements)])
    for i in range(len(elements)):
        elm = elements[i]
        # Make sure we're actually in an ice shelf cavity
        if elm.cavity:
            # Figure out which sector this ice shelf element falls into
            lon = mean(elm.lon)
            lat = mean(elm.lat)
            if lon >= -85 and lon < -30 and lat < -74:
                # Filchner-Ronne
                location_flag[0, i] = 1
            elif lon >= -30 and lon < 65:
                # Eastern Weddell region
                location_flag[1, i] = 1
            elif lon >= 65 and lon < 76:
                # Amery
                location_flag[2, i] = 1
            elif lon >= 76 and lon < 165 and lat >= -74:
                # Australian sector
                location_flag[3, i] = 1
            elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140):
                # Ross Sea
                location_flag[4, i] = 1
            elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1):
                # Amundsen Sea
                location_flag[5, i] = 1
            elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75):
                # Bellingshausen Sea
                location_flag[6, i] = 1
            elif lon >= -66 and lon < -59 and lat >= -74:
                # Larsen Ice Shelves
                location_flag[7, i] = 1
            else:
                print 'No region found for lon=', str(lon), ', lat=', str(lat)
                break  # return
            # All ice shelf elements are in Total Antarctica
            location_flag[8, i] = 1

    print 'Calculating average temperature and salinity'
    # Loop over years
    for year in range(start_year, end_year + 1):
        print 'Processing year ' + str(year)
        # Initialise the volume of each water mass in each sector
        vol_watermass = zeros([num_watermasses, num_sectors])
        # Read temperature and salinity for this year, annually averaged
        id = Dataset(file_head + str(year) + file_tail, 'r')
        temp = mean(id.variables['temp'][:, :], axis=0)
        salt = mean(id.variables['salt'][:, :], axis=0)
        id.close()
        # Loop over elements
        for i in range(len(elements)):
            elm = elements[i]
            # Check if we're in an ice shelf cavity
            if elm.cavity:
                # Get the area of the 2D element
                area = elm.area()
                nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
                # Loop downward
                while True:
                    if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                        # Reached the bottom
                        break
                    # Calculate the average temperature, salinity, and layer
                    # thickness for this 3D triangular prism
                    temp_vals = []
                    salt_vals = []
                    dz_vals = []
                    for n in range(3):
                        temp_vals.append(temp[nodes[n].id])
                        salt_vals.append(salt[nodes[n].id])
                        temp_vals.append(temp[nodes[n].below.id])
                        salt_vals.append(salt[nodes[n].below.id])
                        dz_vals.append(abs(nodes[n].depth - nodes[n].below.depth))
                        # Get ready for the next iteration of the loop
                        nodes[n] = nodes[n].below
                    curr_temp = mean(array(temp_vals))
                    curr_salt = mean(array(salt_vals))
                    curr_volume = area * mean(array(dz_vals))
                    # Get the surface freezing point at this salinity
                    curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt(curr_salt**3) - 2.155e-4 * curr_salt**2
                    # Figure out which water mass this is
                    if curr_temp < curr_tfrz:
                        # ISW
                        wm_key = 0
                    elif curr_salt < 34:
                        # AASW
                        wm_key = 3
                    elif curr_temp > 0:
                        # CDW
                        wm_key = 5
                    elif curr_temp > -1.5:
                        # MCDW
                        wm_key = 4
                    elif curr_salt < 34.5:
                        # LSSW
                        wm_key = 2
                    else:
                        # HSSW
                        wm_key = 1
                    # Integrate temperature and salinity, weighted with
                    # volume, for the sector(s) the element is in
                    curr_sectors = 0
                    for sector in range(num_sectors):
                        if location_flag[sector, i] == 1:
                            curr_sectors += 1
                            temp_watermass[wm_key, sector, year - start_year] += curr_temp * curr_volume
                            salt_watermass[wm_key, sector, year - start_year] += curr_salt * curr_volume
                            vol_watermass[wm_key, sector] += curr_volume
                    # Should be in exactly 2 sectors (1 + total Antarctica)
                    if curr_sectors != 2:
                        print 'Wrong number of sectors for element ' + str(i)
        # Convert from integrals to averages
        for wm_key in range(num_watermasses):
            for sector in range(num_sectors):
                if vol_watermass[wm_key, sector] == 0:
                    # No such water mass, so set average temp and salt to NaN
                    temp_watermass[wm_key, sector, year - start_year] = NaN
                    salt_watermass[wm_key, sector, year - start_year] = NaN
                else:
                    temp_watermass[wm_key, sector, year - start_year] = temp_watermass[wm_key, sector, year - start_year] / vol_watermass[wm_key, sector]
                    salt_watermass[wm_key, sector, year - start_year] = salt_watermass[wm_key, sector, year - start_year] / vol_watermass[wm_key, sector]

    print 'Saving results to log file'
    f = open(log_file, 'w')
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            f.write('Average temperature of ' + wm_names[wm_key] + ' in ' + sector_names[sector] + ' (C)\n')
            for t in range(num_years):
                f.write(str(temp_watermass[wm_key, sector, t]) + '\n')
    for wm_key in range(num_watermasses):
        for sector in range(num_sectors):
            f.write('Average salinity of ' + wm_names[wm_key] + ' in ' + sector_names[sector] + ' (psu)\n')
            for t in range(num_years):
                f.write(str(salt_watermass[wm_key, sector, t]) + '\n')
    f.close()
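
# The integrate-then-divide pattern above is a volume-weighted mean; in
# compact form (a sketch, with vals and vols as 1D arrays over prisms):
def volume_weighted_mean(vals, vols):
    total_vol = sum(vols)
    if total_vol == 0:
        # No such water mass in this sector, as in the NaN branch above
        return NaN
    return sum(vals * vols) / total_vol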
def mip_circumpolar_drift(): # File paths # ECCO2 initial conditions file for temperature ecco2_ini_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.199201.nc' # ROMS grid file roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' # ROMS January 2016 mean temp roms_end_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/temp_salt_jan2016.nc' # FESOM mesh paths fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/' fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/' # FESOM January 2016 mean temp fesom_end_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/temp_salt_jan2016.nc' fesom_end_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/temp_salt_jan2016.nc' # Depth bounds to average between shallow_bound = 300 deep_bound = 1000 # ROMS grid parameters theta_s = 7.0 theta_b = 2.0 hc = 250 N = 31 deg2rad = pi / 180 # Bound for colour scale colour_bound = 3 # Northern boundary for plot nbdry = -50 + 90 print 'Processing ECCO2' id = Dataset(ecco2_ini_file, 'r') ecco_lon_tmp = id.variables['LONGITUDE_T'][:] ecco_lat = id.variables['LATITUDE_T'][:] ecco_depth = id.variables['DEPTH_T'][:] # Depth is positive ecco_temp_3d_tmp = id.variables['THETA'][0, :, :, :] id.close() # Wrap periodic boundary ecco_lon = zeros(size(ecco_lon_tmp) + 2) ecco_lon[0] = ecco_lon_tmp[-1] - 360 ecco_lon[1:-1] = ecco_lon_tmp ecco_lon[-1] = ecco_lon_tmp[0] + 360 ecco_temp_3d = ma.array( zeros((size(ecco_depth), size(ecco_lat), size(ecco_lon)))) ecco_temp_3d[:, :, 0] = ecco_temp_3d_tmp[:, :, -1] ecco_temp_3d[:, :, 1:-1] = ecco_temp_3d_tmp ecco_temp_3d[:, :, -1] = ecco_temp_3d_tmp[:, :, 0] # Calculate dz ecco_depth_edges = zeros(size(ecco_depth) + 1) ecco_depth_edges[1:-1] = 0.5 * (ecco_depth[:-1] + ecco_depth[1:]) # Surface is zero # Extrapolate for bottom ecco_depth_edges[-1] = 2 * ecco_depth[-1] - ecco_depth_edges[-2] ecco_dz = ecco_depth_edges[1:] - ecco_depth_edges[:-1] # Average between bounds # Find the first level below shallow_bound k_start = nonzero(ecco_depth > shallow_bound)[0][0] # Find the first level below deep_bound # Don't worry about regions where this hits the seafloor, as they will # get masked out in the final plot k_end = nonzero(ecco_depth > deep_bound)[0][0] # Integrate between ecco_temp = sum( ecco_temp_3d[k_start:k_end, :, :] * ecco_dz[k_start:k_end, None, None], axis=0) / sum(ecco_dz[k_start:k_end]) # Fill land mask with zeros index = ecco_temp.mask ecco_temp = ecco_temp.data ecco_temp[index] = 0.0 # Prepare interpolation function interp_function = RegularGridInterpolator((ecco_lat, ecco_lon), ecco_temp) print 'Processing MetROMS' # Read grid id = Dataset(roms_grid, 'r') roms_h = id.variables['h'][:, :] roms_zice = id.variables['zice'][:, :] roms_mask = id.variables['mask_rho'][:, :] roms_lon = id.variables['lon_rho'][:, :] roms_lat = id.variables['lat_rho'][:, :] num_lon = size(roms_lon, 1) num_lat = size(roms_lat, 0) id.close() # Interpolate ECCO2 depth-averaged values to the ROMS grid roms_temp_ini = interp_function((roms_lat, roms_lon)) # Apply ROMS land mask roms_temp_ini = ma.masked_where(roms_mask == 0, roms_temp_ini) # Read Jan 2016 values id = Dataset(roms_end_file, 'r') roms_temp_3d_end = id.variables['temp'][0, :, :, :] id.close() # Get z and dz roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d( roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N) # Vertically average between given depths roms_temp_end = average_btw_depths(roms_temp_3d_end, 
roms_z, roms_dz, [-1 * shallow_bound, -1 * deep_bound]) # Mask regions shallower than 1000 m roms_temp_ini = ma.masked_where(roms_h < deep_bound, roms_temp_ini) roms_temp_end = ma.masked_where(roms_h < deep_bound, roms_temp_end) # Mask ice shelf cavities roms_temp_ini = ma.masked_where(roms_zice < 0, roms_temp_ini) roms_temp_end = ma.masked_where(roms_zice < 0, roms_temp_end) # Get difference roms_temp_drift = roms_temp_end - roms_temp_ini # Convert to spherical coordinates roms_x = -(roms_lat + 90) * cos(roms_lon * deg2rad + pi / 2) roms_y = (roms_lat + 90) * sin(roms_lon * deg2rad + pi / 2) print 'Processing low-res FESOM' print '...Building mesh' elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True) # Read rotated lat and lon for each 2D node f = open(fesom_mesh_path_lr + 'nod2d.out', 'r') f.readline() rlon_lr = [] rlat_lr = [] for line in f: tmp = line.split() lon_tmp = float(tmp[1]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 rlon_lr.append(lon_tmp) rlat_lr.append(float(tmp[2])) f.close() rlon_lr = array(rlon_lr) rlat_lr = array(rlat_lr) # Unrotate grid fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr) # Get longitude in the range (0, 360) to match ECCO index = fesom_lon_lr < 0 fesom_lon_lr[index] = fesom_lon_lr[index] + 360 print '...Interpolating ECCO2' fesom_temp_nodes_ini_lr = interp_function((fesom_lat_lr, fesom_lon_lr)) # Read January 2016 temp id = Dataset(fesom_end_file_lr, 'r') fesom_temp_3d_nodes_end_lr = id.variables['temp'][0, :] id.close() print '...Looping over elements' fesom_temp_ini_lr = [] fesom_temp_end_lr = [] patches_lr = [] for elm in elements_lr: # Make sure we're not in an ice shelf cavity, or shallower than deep_bound if not elm.cavity: if all( array([ elm.nodes[0].find_bottom().depth, elm.nodes[1]. find_bottom().depth, elm.nodes[2].find_bottom().depth ]) > deep_bound): # Add a new patch coord = transpose(vstack((elm.x, elm.y))) patches_lr.append(Polygon(coord, True, linewidth=0.)) # Average initial temp over element fesom_temp_ini_lr.append( mean([ fesom_temp_nodes_ini_lr[elm.nodes[0].id], fesom_temp_nodes_ini_lr[elm.nodes[1].id], fesom_temp_nodes_ini_lr[elm.nodes[2].id] ])) # Vertically average final temp for this element fesom_temp_end_lr.append( fesom_element_average_btw_depths( elm, shallow_bound, deep_bound, fesom_temp_3d_nodes_end_lr)) fesom_temp_ini_lr = array(fesom_temp_ini_lr) fesom_temp_end_lr = array(fesom_temp_end_lr) # Get difference fesom_temp_drift_lr = fesom_temp_end_lr - fesom_temp_ini_lr print 'Processing high-res FESOM' print '...Building mesh' elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True) f = open(fesom_mesh_path_hr + 'nod2d.out', 'r') f.readline() rlon_hr = [] rlat_hr = [] for line in f: tmp = line.split() lon_tmp = float(tmp[1]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 rlon_hr.append(lon_tmp) rlat_hr.append(float(tmp[2])) f.close() rlon_hr = array(rlon_hr) rlat_hr = array(rlat_hr) fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr) index = fesom_lon_hr < 0 fesom_lon_hr[index] = fesom_lon_hr[index] + 360 print '...Interpolating ECCO2' fesom_temp_nodes_ini_hr = interp_function((fesom_lat_hr, fesom_lon_hr)) id = Dataset(fesom_end_file_hr, 'r') fesom_temp_3d_nodes_end_hr = id.variables['temp'][0, :] id.close() print '...Looping over elements' fesom_temp_ini_hr = [] fesom_temp_end_hr = [] patches_hr = [] for elm in elements_hr: if not elm.cavity: if all( array([ elm.nodes[0].find_bottom().depth, elm.nodes[1].
find_bottom().depth, elm.nodes[2].find_bottom().depth ]) > deep_bound): coord = transpose(vstack((elm.x, elm.y))) patches_hr.append(Polygon(coord, True, linewidth=0.)) fesom_temp_ini_hr.append( mean([ fesom_temp_nodes_ini_hr[elm.nodes[0].id], fesom_temp_nodes_ini_hr[elm.nodes[1].id], fesom_temp_nodes_ini_hr[elm.nodes[2].id] ])) fesom_temp_end_hr.append( fesom_element_average_btw_depths( elm, shallow_bound, deep_bound, fesom_temp_3d_nodes_end_hr)) fesom_temp_ini_hr = array(fesom_temp_ini_hr) fesom_temp_end_hr = array(fesom_temp_end_hr) fesom_temp_drift_hr = fesom_temp_end_hr - fesom_temp_ini_hr print 'Plotting' fig = figure(figsize=(19, 8)) fig.patch.set_facecolor('white') gs = GridSpec(1, 3) gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05) # ROMS ax = subplot(gs[0, 0], aspect='equal') ax.pcolor(roms_x, roms_y, roms_temp_drift, vmin=-colour_bound, vmax=colour_bound, cmap='RdBu_r') xlim([-nbdry, nbdry]) ylim([-nbdry, nbdry]) title('a) MetROMS', fontsize=28) ax.set_xticks([]) ax.set_yticks([]) # FESOM (low-res) ax = subplot(gs[0, 1], aspect='equal') img = PatchCollection(patches_lr, cmap='RdBu_r') img.set_array(fesom_temp_drift_lr) img.set_clim(vmin=-colour_bound, vmax=colour_bound) img.set_edgecolor('face') ax.add_collection(img) xlim([-nbdry, nbdry]) ylim([-nbdry, nbdry]) title('b) FESOM (low-res)', fontsize=28) ax.set_xticks([]) ax.set_yticks([]) # FESOM (high-res) ax = subplot(gs[0, 2], aspect='equal') img = PatchCollection(patches_hr, cmap='RdBu_r') img.set_array(fesom_temp_drift_hr) img.set_clim(vmin=-colour_bound, vmax=colour_bound) img.set_edgecolor('face') ax.add_collection(img) xlim([-nbdry, nbdry]) ylim([-nbdry, nbdry]) title('c) FESOM (high-res)', fontsize=28) ax.set_xticks([]) ax.set_yticks([]) # Add a horizontal colourbar on the bottom cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04]) cbar = colorbar(img, orientation='horizontal', cax=cbaxes, ticks=arange(-colour_bound, colour_bound + 1, 1), extend='both') cbar.ax.tick_params(labelsize=20) # Main title suptitle(r'Change in temperature from initial conditions ($^{\circ}$C), ' + str(shallow_bound) + '-' + str(deep_bound) + ' m average', fontsize=34) fig.show() fig.savefig('circumpolar_temp_drift.png')
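# The MetROMS panel above is drawn in the circumpolar projection used
# throughout these scripts: the radius is degrees of latitude from the
# South Pole, with longitude rotated so that 0E points up. A minimal
# standalone sketch of that transform (nothing here beyond numpy):
from numpy import pi, sin, cos

def circumpolar_xy(lon, lat):
    # lon, lat in degrees; returns plotting coordinates with the South
    # Pole at the origin
    deg2rad = pi/180.0
    x = -(lat + 90)*cos(lon*deg2rad + pi/2)
    y = (lat + 90)*sin(lon*deg2rad + pi/2)
    return x, y

# A point at 50S plots at radius 40 from the origin, which is why the
# axis limits above use nbdry = -50 + 90.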
def zonal_slice_plot (mesh_path, file_path, var_name, tstep, lon0, depth_min, save=False, fig_name=None, set_limits=False, limits=None): # Set northern boundary and upper (surface) boundary lat_max = -50 depth_max = 0 # Font sizes for figure font_sizes = [30, 24, 20] # Read variable name and units for title id = Dataset(file_path, 'r') varid = id.variables[var_name] name = varid.getncattr('description') units = varid.getncattr('units') if lon0 < 0: lon_string = 'at ' + str(-lon0) + 'W' else: lon_string = 'at ' + str(lon0) + 'E' # Read data data = id.variables[var_name][tstep-1,:] # Check for vector variables that need to be unrotated if var_name in ['u', 'v']: # Read the rotated lat and lon fid = open(mesh_path + 'nod3d.out', 'r') fid.readline() lon = [] lat = [] for line in fid: tmp = line.split() lon_tmp = float(tmp[1]) lat_tmp = float(tmp[2]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 lon.append(lon_tmp) lat.append(lat_tmp) fid.close() lon = array(lon) lat = array(lat) if var_name == 'u': u_data = data[:] v_data = id.variables['v'][tstep-1,:] u_data_lonlat, v_data_lonlat = unrotate_vector(lon, lat, u_data, v_data) data = u_data_lonlat[:] elif var_name == 'v': v_data = data[:] u_data = id.variables['u'][tstep-1,:] u_data_lonlat, v_data_lonlat = unrotate_vector(lon, lat, u_data, v_data) data = v_data_lonlat[:] id.close() # Build the regular FESOM grid elm2D = fesom_grid(mesh_path) # Build the array of SideElements making up the zonal slice selements = fesom_sidegrid(elm2D, data, lon0, lat_max) # Build an array of quadrilateral patches for the plot, and of data values # corresponding to each SideElement # Also find the minimum latitude of any SideElement patches = [] values = [] lat_min = lat_max for selm in selements: # Make patch coord = transpose(vstack((selm.y,selm.z))) patches.append(Polygon(coord, True, linewidth=0.)) # Save data value values.append(selm.var) # Update minimum latitude if needed lat_min = min(lat_min, amin(selm.y)) # Set southern boundary to be just south of the minimum latitude lat_min = lat_min-1 # Choose colour bounds if set_limits: # User-specified bounds var_min = limits[0] var_max = limits[1] if var_min == -var_max: # Bounds are centered on zero, so choose a blue-to-red colourmap # centered on yellow colour_map = 'RdYlBu_r' else: colour_map = 'jet' else: # Determine bounds automatically if var_name in ['u', 'v', 'w']: # Center levels on 0 for certain variables, with a blue-to-red # colourmap max_val = amax(abs(array(values))) var_min = -max_val var_max = max_val colour_map = 'RdYlBu_r' else: var_min = amin(array(values)) var_max = amax(array(values)) colour_map = 'jet' # Set up plot fig = figure(figsize=(16,8)) ax = fig.add_subplot(1,1,1) # Set colourmap for patches, and refer it to the values array img = PatchCollection(patches, cmap=colour_map) img.set_array(array(values)) img.set_edgecolor('face') # Add patches to plot ax.add_collection(img) # Configure plot xlim(lat_min, lat_max) ylim(depth_min, depth_max) title(name + ' (' + units + ') ' + lon_string, fontsize=font_sizes[0]) xlabel('Latitude', fontsize=font_sizes[1]) ylabel('Depth (m)', fontsize=font_sizes[1]) setp(ax.get_xticklabels(), fontsize=font_sizes[2]) setp(ax.get_yticklabels(), fontsize=font_sizes[2]) cbar = colorbar(img) cbar.ax.tick_params(labelsize=font_sizes[2]) img.set_clim(vmin=var_min, vmax=var_max) if save: fig.savefig(fig_name) else: fig.show()
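# Example usage of zonal_slice_plot with hypothetical mesh and output
# paths (tstep is 1-based, since the function indexes data[tstep-1]):
# zonal_slice_plot('../mesh/meshB/', 'MK44005.2005.oce.mean.nc', 'temp',
#                  tstep=1, lon0=-30, depth_min=-1500, save=True,
#                  fig_name='temp_slice_30W.png')
# This would plot the first output record of temperature along 30W,
# down to 1500 m depth, and save the figure to file.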
def timeseries_massloss (mesh_path, diag_file, log_file): # Titles and figure names for each ice shelf names = ['All Ice Shelves', 'Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'] fig_names = ['total_massloss.png', 'larsen_d.png', 'larsen_c.png', 'wilkins_georgevi_stange.png', 'ronne_filchner.png', 'abbot.png', 'pig.png', 'thwaites.png', 'dotson.png', 'getz.png', 'nickerson.png', 'sulzberger.png', 'mertz.png', 'totten_moscowuni.png', 'shackleton.png', 'west.png', 'amery.png', 'princeharald.png', 'baudouin_borchgrevink.png', 'lazarev.png', 'nivl.png', 'fimbul_jelbart_ekstrom.png', 'brunt_riiserlarsen.png', 'ross.png'] # Limits on longitude and latitude for each ice shelf # These depend on the source geometry, in this case RTopo 1.05 # Note there is one extra index at the end of each array; this is because # the Ross region crosses the line 180W and therefore is split into two # We have -181 and 181 not -180 and 180 at this boundary so that # elements which cross the boundary are still counted lon_min = [-181, -62.67, -65.5, -79.17, -85, -104.17, -102.5, -108.33, -114.5, -135.67, -149.17, -155, 144, 115, 94.17, 80.83, 65, 33.83, 19, 12.9, 9.33, -10.05, -28.33, -181, 158.33] lon_max = [181, -59.33, -60, -66.67, -28.33, -88.83, -99.17, -103.33, -111.5, -114.33, -140, -145, 146.62, 123.33, 102.5, 89.17, 75, 37.67, 33.33, 16.17, 12.88, 7.6, -10.33, -146.67, 181] lat_min = [-90, -73.03, -69.35, -74.17, -83.5, -73.28, -75.5, -75.5, -75.33, -74.9, -76.42, -78, -67.83, -67.17, -66.67, -67.83, -73.67, -69.83, -71.67, -70.5, -70.75, -71.83, -76.33, -85, -84.5] lat_max = [-30, -69.37, -66.13, -69.5, -74.67, -71.67, -74.17, -74.67, -73.67, -73, -75.17, -76.41, -66.67, -66.5, -64.83, -66.17, -68.33, -68.67, -68.33, -69.33, -69.83, -69.33, -71.5, -77.77, -77] # Observed mass loss (Rignot 2013) and uncertainty for each ice shelf, in Gt/y obs_massloss = [1325, 1.4, 20.7, 135.4, 155.4, 51.8, 101.2, 97.5, 45.2, 144.9, 4.2, 18.2, 7.9, 90.6, 72.6, 27.2, 35.5, -2, 21.6, 6.3, 3.9, 26.8, 9.7, 47.7] obs_massloss_error = [235, 14, 67, 40, 45, 19, 8, 7, 4, 14, 2, 3, 3, 8, 15, 10, 23, 3, 18, 2, 2, 14, 16, 34] # Observed ice shelf melt rates and uncertainty obs_ismr = [0.85, 0.1, 0.4, 3.1, 0.3, 1.7, 16.2, 17.7, 7.8, 4.3, 0.6, 1.5, 1.4, 7.7, 2.8, 1.7, 0.6, -0.4, 0.4, 0.7, 0.5, 0.5, 0.1, 0.1] obs_ismr_error = [0.1, 0.6, 1, 0.8, 0.1, 0.6, 1, 1, 0.6, 0.4, 0.3, 0.3, 0.6, 0.7, 0.6, 0.7, 0.4, 0.6, 0.4, 0.2, 0.2, 0.2, 0.2, 0.1] # Density of ice in kg/m^3 rho_ice = 916 circumpolar = True # Only consider elements south of 30S cross_180 = False # Don't make second copies of elements that cross 180E days_per_output = 5 # Number of days for each output step tmp_massloss = [] # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' f = open(log_file, 'r') # Skip the first line (header) f.readline() for line in f: try: tmp_massloss.append(float(line)) except(ValueError): # Reached the header for the next variable break start_t = len(tmp_massloss) # Set up 
array for mass loss values at each ice shelf old_massloss = empty([len(names), start_t]) # Fill in the first timeseries (entire continent) old_massloss[0,:] = tmp_massloss[:] index = 1 # Loop over the individual ice shelves while index < len(names): t = 0 for line in f: try: old_massloss[index, t] = float(line) t += 1 except(ValueError): # Reached the header for the next ice shelf break index +=1 else: start_t = 0 print 'Building grid' elements = fesom_grid(mesh_path, circumpolar, cross_180) print 'Reading data' id = Dataset(diag_file, 'r') num_time = id.variables['time'].shape[0] # Set up array of mass loss values massloss = empty([len(names), start_t+num_time]) if exists(log_file): # Fill first start_t timesteps with existing values massloss[:,0:start_t] = old_massloss[:,:] # Read melt rate and convert from m/s to m/y ismr = id.variables['wnet'][:,:]*365.25*24*60*60 id.close() print 'Setting up arrays' # Melt rate timeseries at each element ismr_elm = zeros([num_time, len(elements)]) # Area of each element area_elm = zeros(len(elements)) # Flag to indicate which ice shelves the element is part of location_flag = zeros([len(names), len(elements)]) # Loop over each element to fill these in for i in range(len(elements)): elm = elements[i] # Make sure we're actually in an ice shelf cavity if elm.cavity: # Average ice shelf melt rate timeseries over 3 component nodes ismr_elm[:,i] = (ismr[:,elm.nodes[0].id] + ismr[:,elm.nodes[1].id] + ismr[:,elm.nodes[2].id])/3 # Call area function area_elm[i] = elm.area() # Loop over ice shelves for index in range(len(names)): # Figure out whether or not this element is part of the given # ice shelf if all(elm.lon >= lon_min[index]) and all(elm.lon <= lon_max[index]) and all(elm.lat >= lat_min[index]) and all(elm.lat <= lat_max[index]): location_flag[index,i] = 1 if index == len(names)-1: # Ross region is split into two if all(elm.lon >= lon_min[index+1]) and all(elm.lon <= lon_max[index+1]) and all(elm.lat >= lat_min[index+1]) and all(elm.lat <= lat_max[index+1]): location_flag[index,i] = 1 # Calculate conversion factors from mass loss to area-averaged melt rate # for each ice shelf factors = empty(len(names)) for index in range(len(names)): # Calculate area of the ice shelf tmp_area = sum(area_elm*location_flag[index,:]) factors[index] = 1e12/(rho_ice*tmp_area) print 'Area of ' + names[index] + ': ' + str(tmp_area) + ' m^2' # Build timeseries for t in range(num_time): # Loop over ice shelves for index in range(len(names)): # Integrate ice shelf melt rate over area to get volume loss volumeloss = sum(ismr_elm[t,:]*area_elm*location_flag[index,:]) # Convert to mass loss in Gt/y massloss[index,start_t+t] = 1e-12*rho_ice*volumeloss # Calculate time values time = arange(size(massloss,1))*days_per_output/365. 
print 'Plotting' for index in range(len(names)): # Calculate the bounds on observed mass loss and melt rate massloss_low = obs_massloss[index] - obs_massloss_error[index] massloss_high = obs_massloss[index] + obs_massloss_error[index] ismr_low = obs_ismr[index] - obs_ismr_error[index] ismr_high = obs_ismr[index] + obs_ismr_error[index] # Set up plot: mass loss and melt rate are directly proportional (with # a different constant of proportionality for each ice shelf depending # on its area) so plot one line with two y-axes fig, ax1 = subplots() ax1.plot(time, massloss[index,:], color='black') # In blue, add dashed lines for observed mass loss ax1.axhline(massloss_low, color='b', linestyle='dashed') ax1.axhline(massloss_high, color='b', linestyle='dashed') # Make sure y-limits won't cut off observed melt rate ymin = amin([ismr_low/factors[index], massloss_low, amin(massloss[index,:])]) ymax = amax([ismr_high/factors[index], massloss_high, amax(massloss[index,:])]) # Adjust y-limits to line up with ticks ticks = ax1.get_yticks() min_tick = ticks[0] max_tick = ticks[-1] dtick = ticks[1]-ticks[0] while min_tick >= ymin: min_tick -= dtick while max_tick <= ymax: max_tick += dtick ax1.set_ylim([min_tick, max_tick]) # Title and ticks in blue for this side of the plot ax1.set_ylabel('Basal Mass Loss (Gt/y)', color='b') for t1 in ax1.get_yticklabels(): t1.set_color('b') ax1.set_xlabel('Years') ax1.grid(True) # Twin axis for melt rates ax2 = ax1.twinx() # Make sure the scales line up limits = ax1.get_ylim() ax2.set_ylim([limits[0]*factors[index], limits[1]*factors[index]]) # In red, add dashed lines for observed ice shelf melt rates ax2.axhline(ismr_low, color='r', linestyle='dashed') ax2.axhline(ismr_high, color='r', linestyle='dashed') # Title and ticks in red for this side of the plot ax2.set_ylabel('Area-Averaged Ice Shelf Melt Rate (m/y)', color='r') for t2 in ax2.get_yticklabels(): t2.set_color('r') # Name of the ice shelf for the main title title(names[index]) fig.savefig(fig_names[index]) print 'Saving results to log file' f = open(log_file, 'w') for index in range(len(names)): f.write(names[index] + ' Basal Mass Loss\n') for t in range(size(time)): f.write(str(massloss[index, t]) + '\n') f.close()
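# The twin y-axes above work because basal mass loss and area-averaged
# melt rate are directly proportional: factors[index] converts Gt/y to
# m/y via 1e12/(rho_ice*area), since 1 Gt/y = 1e12 kg/y, dividing by
# rho_ice gives m^3/y of ice, and dividing by cavity area gives m/y. A
# quick worked check with illustrative numbers (the cavity area below
# is a made-up round figure, not model output):
rho_ice_demo = 916                             # kg/m^3
area_demo = 6.0e10                             # m^2, hypothetical cavity area
factor_demo = 1e12/(rho_ice_demo*area_demo)    # ~0.0182 (m/y) per (Gt/y)
ismr_demo = 35.5*factor_demo                   # 35.5 Gt/y -> ~0.65 m/y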
def timeseries_seaice (mesh_path, ice_file, log_file): circumpolar = True # Only consider elements south of 30S cross_180 = False # Don't make second copies of elements that cross 180E days_per_output = 5 # Number of days for each output step total_area = [] total_volume = [] # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' f = open(log_file, 'r') # Skip the first line (header) f.readline() for line in f: try: total_area.append(float(line)) except(ValueError): # Reached the header for the next variable break for line in f: total_volume.append(float(line)) f.close() print 'Building grid' elements = fesom_grid(mesh_path, circumpolar, cross_180) print 'Reading data' id = Dataset(ice_file, 'r') num_time = id.variables['time'].shape[0] aice = id.variables['area'][:,:] hice = id.variables['hice'][:,:] id.close() print 'Setting up arrays' # Sea ice concentration at each element aice_elm = zeros([num_time, len(elements)]) # Sea ice height at each element hice_elm = zeros([num_time, len(elements)]) # Area of each element area_elm = zeros(len(elements)) # Loop over elements to fill these in for i in range(len(elements)): elm = elements[i] # Average aice and hice over 3 component nodes aice_elm[:,i] = (aice[:,elm.nodes[0].id] + aice[:,elm.nodes[1].id] + aice[:,elm.nodes[2].id])/3 hice_elm[:,i] = (hice[:,elm.nodes[0].id] + hice[:,elm.nodes[1].id] + hice[:,elm.nodes[2].id])/3 # Call area function area_elm[i] = elm.area() # Build timeseries for t in range(num_time): # Integrate area and convert to million km^2 total_area.append(sum(aice_elm[t,:]*area_elm)*1e-12) # Integrate volume and convert to thousand km^3 total_volume.append(sum(aice_elm[t,:]*hice_elm[t,:]*area_elm)*1e-12) # Calculate time values time = arange(len(total_area))*days_per_output/365. print 'Plotting total sea ice area' clf() plot(time, total_area) xlabel('Years') ylabel(r'Total Sea Ice Area (million km$^2$)') grid(True) savefig('seaice_area.png') print 'Plotting total sea ice volume' clf() plot(time, total_volume) xlabel('Years') ylabel(r'Total Sea Ice Volume (thousand km$^3$)') grid(True) savefig('seaice_volume.png') print 'Saving results to log file' f = open(log_file, 'w') f.write('Total Sea Ice Area (million km^2):\n') for elm in total_area: f.write(str(elm) + '\n') f.write('Total Sea Ice Volume (thousand km^3):\n') for elm in total_volume: f.write(str(elm) + '\n') f.close()
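# Unit check for the 1e-12 factors above: integrated area comes out in
# m^2 and 1 million km^2 = 1e6*(1e3 m)^2 = 1e12 m^2, while integrated
# volume comes out in m^3 and 1 thousand km^3 = 1e3*(1e3 m)^3 = 1e12 m^3.
# Both conversions therefore multiply by 1e-12:
million_km2_in_m2 = 1e6*(1e3)**2     # = 1e12
thousand_km3_in_m3 = 1e3*(1e3)**3    # = 1e12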
def wind_stress_curl(): # File paths mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/' directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/' directories = [ '/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/', '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/' ] file_beg = 'annual_avg.forcing.diag.1996.2005.nc' file_end = 'annual_avg.forcing.diag.2091.2100.nc' # Titles for plotting expt_names = [ 'RCP 4.5 MMM', 'RCP 4.5 ACCESS', 'RCP 8.5 MMM', 'RCP 8.5 ACCESS' ] expt_filenames = ['rcp45_m', 'rcp45_a', 'rcp85_m', 'rcp85_a'] num_expts = len(directories) colours = ['blue', 'cyan', 'green', 'magenta'] # Bounds on regular grid lon_min = -180 lon_max = 180 lat_min = -75 lat_max = -50 # Number of points on regular grid num_lon = 1000 num_lat = 200 # Radius of the Earth in metres r = 6.371e6 # Degrees to radians conversion factor deg2rad = pi / 180.0 # Don't consider values above this threshold (small, negative) threshold = -5e-8 print 'Building mesh' elements = fesom_grid(mesh_path, circumpolar=True, cross_180=True) # Read (rotated) lon and lat at each 2D node f = open(mesh_path + 'nod2d.out', 'r') n2d = int(f.readline()) rlon = [] rlat = [] for line in f: tmp = line.split() lon_tmp = float(tmp[1]) lat_tmp = float(tmp[2]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 rlon.append(lon_tmp) rlat.append(lat_tmp) f.close() rlon = array(rlon) rlat = array(rlat) print 'Reading data' print '...1996-2005' # Read rotated wind stress components id = Dataset(directory_beg + file_beg, 'r') stress_xr = id.variables['stress_x'][0, :] stress_yr = id.variables['stress_y'][0, :] id.close() # Unrotate stress_x_beg, stress_y_beg = unrotate_vector(rlon, rlat, stress_xr, stress_yr) # Set up array for wind stress in each RCP experiment stress_x_end = zeros([num_expts, n2d]) stress_y_end = zeros([num_expts, n2d]) for expt in range(num_expts): print '...' + expt_names[expt] id = Dataset(directories[expt] + file_end, 'r') stress_xr = id.variables['stress_x'][0, :] stress_yr = id.variables['stress_y'][0, :] id.close() stress_x_tmp, stress_y_tmp = unrotate_vector(rlon, rlat, stress_xr, stress_yr) stress_x_end[expt, :] = stress_x_tmp stress_y_end[expt, :] = stress_y_tmp print 'Interpolating to regular grid' # Set up regular grid # Start with boundaries lon_reg_edges = linspace(lon_min, lon_max, num_lon + 1) lat_reg_edges = linspace(lat_min, lat_max, num_lat + 1) # Now get centres lon_reg = 0.5 * (lon_reg_edges[:-1] + lon_reg_edges[1:]) lat_reg = 0.5 * (lat_reg_edges[:-1] + lat_reg_edges[1:]) # Also get differentials in lon-lat space dlon = lon_reg_edges[1:] - lon_reg_edges[:-1] dlat = lat_reg_edges[1:] - lat_reg_edges[:-1] # Make 2D versions lon_reg_2d, lat_reg_2d = meshgrid(lon_reg, lat_reg) dlon_2d, dlat_2d = meshgrid(dlon, dlat) # Calculate differentials in Cartesian space dx = r * cos(lat_reg_2d * deg2rad) * dlon_2d * deg2rad dy = r * dlat_2d * deg2rad # Set up arrays for result stress_x_reg_beg = zeros([num_lat, num_lon]) stress_y_reg_beg = zeros([num_lat, num_lon]) stress_x_reg_end = zeros([num_expts, num_lat, num_lon]) stress_y_reg_end = zeros([num_expts, num_lat, num_lon]) # For each element, check if a point on the regular lat-lon grid lies # within. If so, do barycentric interpolation to that point.
for elm in elements: # Check if we are within domain of regular grid if amin(elm.lat) > lat_max: continue # Find largest regular longitude value west of Element tmp = nonzero(lon_reg > amin(elm.lon))[0] if len(tmp) == 0: # Element crosses the western boundary iW = 0 else: iW = tmp[0] - 1 # Find smallest regular longitude value east of Element tmp = nonzero(lon_reg > amax(elm.lon))[0] if len(tmp) == 0: # Element crosses the eastern boundary iE = num_lon else: iE = tmp[0] # Find largest regular latitude value south of Element tmp = nonzero(lat_reg > amin(elm.lat))[0] if len(tmp) == 0: # Element crosses the southern boundary jS = 0 else: jS = tmp[0] - 1 # Find smallest regular latitude value north of Element tmp = nonzero(lat_reg > amax(elm.lat))[0] if len(tmp) == 0: # Element crosses the northern boundary jN = num_lat else: jN = tmp[0] for i in range(iW + 1, iE): for j in range(jS + 1, jN): # There is a chance that the regular gridpoint at (i,j) # lies within this element lon0 = lon_reg[i] lat0 = lat_reg[j] if in_triangle(elm, lon0, lat0): # Get area of entire triangle area = triangle_area(elm.lon, elm.lat) # Get area of each sub-triangle formed by # (lon0, lat0) area0 = triangle_area([lon0, elm.lon[1], elm.lon[2]], [lat0, elm.lat[1], elm.lat[2]]) area1 = triangle_area([lon0, elm.lon[0], elm.lon[2]], [lat0, elm.lat[0], elm.lat[2]]) area2 = triangle_area([lon0, elm.lon[0], elm.lon[1]], [lat0, elm.lat[0], elm.lat[1]]) # Find fractional area of each cff = [area0 / area, area1 / area, area2 / area] # 1996-2005 # Find value of stress_x and stress_y at each Node vals_x = [] vals_y = [] for n in range(3): vals_x.append(stress_x_beg[elm.nodes[n].id]) vals_y.append(stress_y_beg[elm.nodes[n].id]) # Barycentric interpolation to lon0, lat0 stress_x_reg_beg[j, i] = sum(array(cff) * array(vals_x)) stress_y_reg_beg[j, i] = sum(array(cff) * array(vals_y)) # RCPs for expt in range(num_expts): vals_x = [] vals_y = [] for n in range(3): vals_x.append(stress_x_end[expt, elm.nodes[n].id]) vals_y.append(stress_y_end[expt, elm.nodes[n].id]) stress_x_reg_end[expt, j, i] = sum(array(cff) * array(vals_x)) stress_y_reg_end[expt, j, i] = sum(array(cff) * array(vals_y)) print 'Calculating curl' # 1996-2005 # First calculate the two derivatives dv_dx = zeros(shape(stress_x_reg_beg)) du_dy = zeros(shape(stress_x_reg_beg)) # Forward difference approximation dv_dx[:, :-1] = (stress_y_reg_beg[:, 1:] - stress_y_reg_beg[:, :-1]) / dx[:, :-1] du_dy[:-1, :] = (stress_x_reg_beg[1:, :] - stress_x_reg_beg[:-1, :]) / dy[:-1, :] # Backward difference for the last row dv_dx[:, -1] = (stress_y_reg_beg[:, -1] - stress_y_reg_beg[:, -2]) / dx[:, -1] du_dy[-1, :] = (stress_x_reg_beg[-1, :] - stress_x_reg_beg[-2, :]) / dy[-1, :] curl_beg = dv_dx - du_dy # RCPs curl_end_tmp = zeros(shape(stress_x_reg_end)) for expt in range(num_expts): dv_dx = zeros(shape(stress_x_reg_beg)) du_dy = zeros(shape(stress_x_reg_beg)) dv_dx[:, :-1] = (stress_y_reg_end[expt, :, 1:] - stress_y_reg_end[expt, :, :-1]) / dx[:, :-1] du_dy[:-1, :] = (stress_x_reg_end[expt, 1:, :] - stress_x_reg_end[expt, :-1, :]) / dy[:-1, :] dv_dx[:, -1] = (stress_y_reg_end[expt, :, -1] - stress_y_reg_end[expt, :, -2]) / dx[:, -1] du_dy[-1, :] = (stress_x_reg_end[expt, -1, :] - stress_x_reg_end[expt, -2, :]) / dy[-1, :] curl_end_tmp[expt, :, :] = dv_dx - du_dy print 'Plotting zonal averages' # Calculate zonal averages curl_beg_avg = mean(curl_beg, axis=1) curl_end_avg = mean(curl_end_tmp, axis=2) # Plot zonal averages fig, ax = subplots(figsize=(10, 6)) ax.plot(curl_beg_avg, lat_reg, 
label='1996-2005', color='black', linewidth=2) for expt in range(num_expts): ax.plot(curl_end_avg[expt, :], lat_reg, label=expt_names[expt], color=colours[expt], linewidth=2) title('Curl of wind stress (2091-2100)', fontsize=18) xlabel(r'N/m$^3$', fontsize=14) ylabel('latitude', fontsize=14) ylim([lat_min, lat_max]) grid(True) # Move plot over to make room for legend box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Make legend ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fig.show() fig.savefig('windstress_curl_rcp.png') # Plot anomalies in zonal averages fig, ax = subplots(figsize=(10, 6)) for expt in range(num_expts): ax.plot(curl_end_avg[expt, :] - curl_beg_avg, lat_reg, label=expt_names[expt], color=colours[expt], linewidth=2) title('Anomalies in curl of wind stress (2091-2100 minus 1996-2005)', fontsize=18) xlabel(r'N/m$^3$', fontsize=14) ylabel('latitude', fontsize=14) ylim([lat_min, lat_max]) grid(True) # Move plot over to make room for legend box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Make legend ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fig.show() fig.savefig('windstress_curl_diff_rcp.png') # Plot percent change in zonal averages fig, ax = subplots(figsize=(10, 6)) for expt in range(num_expts): ax.plot((curl_end_avg[expt, :] - curl_beg_avg) / curl_beg_avg * 100, lat_reg, label=expt_names[expt], color=colours[expt], linewidth=2) title('Percent change in curl of wind stress (2091-2100 minus 1996-2005)', fontsize=18) xlabel('%', fontsize=14) ylabel('latitude', fontsize=14) xlim([-20, 20]) ylim([-65.5, -58]) grid(True) # Move plot over to make room for legend box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Make legend ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fig.show() fig.savefig('windstress_curl_percent_rcp.png') print 'Plotting 2D fields' # First mask out regions above threshold at beginning curl_beg = ma.masked_where(curl_beg > threshold, curl_beg) curl_end = ma.empty(shape(curl_end_tmp)) for expt in range(num_expts): curl_end[expt, :, :] = ma.masked_where(curl_beg > threshold, curl_end_tmp[expt, :, :]) # Calculate percent change for each RCP percent_change = ma.empty(shape(curl_end)) for expt in range(num_expts): percent_change[expt, :, :] = (curl_end[expt, :, :] - curl_beg) / curl_beg * 100 # 1996-2005 fig, ax = subplots(figsize=(10, 6)) bound = 1e-6 lev = linspace(-bound, bound, num=50) img = ax.contourf(lon_reg, lat_reg, curl_beg, lev, cmap='RdBu_r') xlabel('Longitude') ylabel('Latitude') xlim([lon_min, lon_max]) ylim([lat_min, lat_max]) title('Wind stress curl, 1996-2005 (N/m^3)', fontsize=18) colorbar(img) fig.show() fig.savefig('windstress_curl_2D_beg.png') for expt in range(num_expts): bound = 50 lev = linspace(-bound, bound, num=50) fig, ax = subplots(figsize=(10, 6)) img = ax.contourf(lon_reg, lat_reg, percent_change[expt, :, :], lev, cmap='RdBu_r') xlabel('Longitude') ylabel('Latitude') xlim([lon_min, lon_max]) ylim([lat_min, lat_max]) title( 'Percent change in wind stress curl, 2091-2100 versus 1996-2005 (' + expt_names[expt] + ')', fontsize=18) colorbar(img) fig.show() fig.savefig('windstress_curl_2D_percent_' + expt_filenames[expt] + '.png')
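# The curl stencil above is a one-sided finite difference: forward
# differences everywhere, falling back to backward differences on the
# last column (for d/dx) and last row (for d/dy). A minimal standalone
# sketch of the same scheme, assuming dx and dy arrays shaped like the
# field, as constructed above:
from numpy import zeros, shape

def curl_2d(tau_x, tau_y, dx, dy):
    # curl = d(tau_y)/dx - d(tau_x)/dy on a regular lat-lon grid
    dv_dx = zeros(shape(tau_x))
    du_dy = zeros(shape(tau_x))
    dv_dx[:, :-1] = (tau_y[:, 1:] - tau_y[:, :-1])/dx[:, :-1]
    du_dy[:-1, :] = (tau_x[1:, :] - tau_x[:-1, :])/dy[:-1, :]
    dv_dx[:, -1] = (tau_y[:, -1] - tau_y[:, -2])/dx[:, -1]
    du_dy[-1, :] = (tau_x[-1, :] - tau_x[-2, :])/dy[-1, :]
    return dv_dx - du_dy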
def hssw_aabw_distribution (): # File paths mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/' directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/' directories = ['/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/', '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/', '/short/y99/kaa561/FESOM/highres_spinup/'] file_beg = 'annual_avg.oce.mean.1996.2005.nc' file_end = 'annual_avg.oce.mean.2091.2100.nc' # Titles for plotting expt_names = ['RCP 4.5 MMM', 'RCP 4.5 ACCESS', 'RCP 8.5 MMM', 'RCP 8.5 ACCESS', 'CONTROL'] num_expts = len(directories) # Mesh parameters circumpolar = False cross_180 = False # Northern boundary of water masses to consider nbdry = -65 # Number of temperature and salinity bins num_bins = 1000 # Bounds on temperature and salinity bins (pre-computed, change if needed) min_salt = 32.3 max_salt = 35.1 min_temp = -3.1 max_temp = 3.8 # Bounds to plot for HSSW and AABW hssw_salt_bounds = [34.3, 35] hssw_temp_bounds = [-2.25, -1.25] aabw_salt_bounds = [34.55, 34.8] aabw_temp_bounds = [-1, 2.5] # More readable labels hssw_salt_ticks = arange(34.3, 35+0.1, 0.1) hssw_salt_labels = ['', '', '34.5', '', '', '', '', '35'] hssw_temp_ticks = arange(-2.25, -1.25+0.25, 0.25) hssw_temp_labels = ['', '-2', '', '-1.5', ''] aabw_salt_ticks = arange(34.55, 34.8+0.05, 0.05) aabw_salt_labels = ['', '34.6', '', '34.7', '', ''] aabw_temp_ticks = arange(-1, 2.5+0.5, 0.5) aabw_temp_labels = ['', '', '0', '', '1', '', '2', ''] print 'Setting up bins' # Calculate boundaries of temperature bins temp_bins = linspace(min_temp, max_temp, num=num_bins) # Calculate centres of temperature bins (for plotting) temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:]) # Repeat for salinity salt_bins = linspace(min_salt, max_salt, num=num_bins) salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:]) # Set up a 3D array of experiment x temperature bins x salinity bins to # increment with volume of water masses ts_vals = zeros([num_expts+1, size(temp_centres), size(salt_centres)]) # Calculate surface freezing point as a function of salinity as seen by # sea ice model freezing_pt = -0.0575*salt_centres + 1.7105e-3*sqrt(salt_centres**3) - 2.155e-4*salt_centres**2 print 'Building mesh' elements = fesom_grid(mesh_path, circumpolar, cross_180) print 'Reading data' # 1996-2005 id = Dataset(directory_beg + file_beg) n3d = id.variables['temp'].shape[1] temp_nodes = empty([num_expts+1, n3d]) salt_nodes = empty([num_expts+1, n3d]) temp_nodes[0,:] = id.variables['temp'][0,:] salt_nodes[0,:] = id.variables['salt'][0,:] id.close() # Loop over RCPs for expt in range(num_expts): id = Dataset(directories[expt] + file_end) temp_nodes[expt+1,:] = id.variables['temp'][0,:] salt_nodes[expt+1,:] = id.variables['salt'][0,:] id.close() print 'Binning elements' for elm in elements: # See if we're in the region of interest if all(elm.lat < nbdry): # Get area of 2D triangle area = elm.area() nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] # Loop downward while True: if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None: # We've reached the bottom break # Calculate average temperature and salinity for each # experiment, as well as layer thickness, over this 3D # triangular prism. 
temp_vals = empty([num_expts+1, 6]) salt_vals = empty([num_expts+1, 6]) dz = empty(3) for i in range(3): # Loop over experiments for expt in range(num_expts+1): # Average temperature over 6 nodes temp_vals[expt,i] = temp_nodes[expt,nodes[i].id] temp_vals[expt,i+3] = temp_nodes[expt,nodes[i].below.id] salt_vals[expt,i] = salt_nodes[expt,nodes[i].id] salt_vals[expt,i+3] = salt_nodes[expt,nodes[i].below.id] # Average dz over 3 vertical edges dz[i] = abs(nodes[i].depth - nodes[i].below.depth) # Get ready for next repetition of loop nodes[i] = nodes[i].below temp_elm = mean(temp_vals, axis=1) salt_elm = mean(salt_vals, axis=1) # Calculate volume of 3D triangular prism volume = area*mean(dz) # Loop over experiments again for expt in range(num_expts+1): # Figure out which bins this falls into temp_index = nonzero(temp_bins > temp_elm[expt])[0][0] - 1 salt_index = nonzero(salt_bins > salt_elm[expt])[0][0] - 1 # Increment bins with volume ts_vals[expt, temp_index, salt_index] += volume # Mask bins with zero volume ts_vals = ma.masked_where(ts_vals==0, ts_vals) # Find the volume bounds for plotting min_val = log(amin(ts_vals)) max_val = log(amax(ts_vals)) print 'Plotting' fig = figure(figsize=(20,11)) # HSSW gs_a = GridSpec(1,num_expts+1) gs_a.update(left=0.05, right=0.98, bottom=0.66, top=0.86, wspace=0.16) for expt in range(num_expts+1): ax = subplot(gs_a[0,expt]) # Log scale is more visible img = pcolor(salt_centres, temp_centres, log(ts_vals[expt,:,:]), vmin=min_val, vmax=max_val, cmap='jet') plot(salt_centres, freezing_pt, color='black', linestyle='dashed', linewidth=2) grid(True) xlim(hssw_salt_bounds) ylim(hssw_temp_bounds) ax.set_xticks(hssw_salt_ticks) ax.set_xticklabels(hssw_salt_labels) ax.set_yticks(hssw_temp_ticks) ax.set_yticklabels(hssw_temp_labels) ax.tick_params(axis='x', labelsize=18) ax.tick_params(axis='y', labelsize=18) # Labels and titles if expt == 0: xlabel('Salinity (psu)', fontsize=20) ylabel(r'Temperature ($^{\circ}$C)', fontsize=20) title('1996-2005', fontsize=22) elif expt == 1: title('(2091-2100)\n' + expt_names[expt-1], fontsize=22) else: title(expt_names[expt-1], fontsize=22) # HSSW title if expt == 2: text(34.83, hssw_temp_bounds[1]+0.2, 'a) HSSW', ha='left', fontsize=30) # AABW gs_b = GridSpec(1,num_expts+1) gs_b.update(left=0.05, right=0.98, bottom=0.12, top=0.54, wspace=0.16) for expt in range(num_expts+1): ax = subplot(gs_b[0,expt]) img = pcolor(salt_centres, temp_centres, log(ts_vals[expt,:,:]), vmin=min_val, vmax=max_val, cmap='jet') grid(True) xlim(aabw_salt_bounds) ylim(aabw_temp_bounds) ax.set_xticks(aabw_salt_ticks) ax.set_xticklabels(aabw_salt_labels) ax.set_yticks(aabw_temp_ticks) ax.set_yticklabels(aabw_temp_labels) ax.tick_params(axis='x', labelsize=18) ax.tick_params(axis='y', labelsize=18) if expt == 0: xlabel('Salinity (psu)', fontsize=20) ylabel(r'Temperature ($^{\circ}$C)', fontsize=20) title('1996-2005', fontsize=22) elif expt == 1: title('(2091-2100)\n' + expt_names[expt-1], fontsize=22) else: title(expt_names[expt-1], fontsize=22) # AABW title if expt == 2: text(34.71, aabw_temp_bounds[1]+0.4, 'b) AABW', ha='left', fontsize=30) # Horizontal colourbar at the bottom if expt == num_expts: cbaxes = fig.add_axes([0.35, 0.06, 0.3, 0.02]) cbar = colorbar(img, cax=cbaxes, orientation='horizontal', ticks=arange(18, 30+2, 2)) cbar.ax.tick_params(labelsize=18) text(0.5, 0.01, 'log of volume', fontsize=20, transform=fig.transFigure, ha='center') # Main title suptitle(r'Water masses south of 65$^{\circ}$S', fontsize=30) fig.show() 
fig.savefig('hssw_aabw_distribution.png')
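# The bin lookup above (nonzero(temp_bins > value)[0][0] - 1) can also
# be written with numpy's searchsorted; for values strictly between bin
# edges, which the pre-computed min/max bounds guarantee here, the two
# are equivalent. A minimal sketch:
from numpy import linspace, searchsorted

demo_bins = linspace(-3.1, 3.8, num=1000)
demo_index = searchsorted(demo_bins, -1.9) - 1
# same result as: nonzero(demo_bins > -1.9)[0][0] - 1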
def interpolate_nick_climatology(melt_file, temp_file, out_file): nick_grid = '/short/y99/kaa561/nick_interpolation/lonlatPISM.nc' mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/' # Read Nick's lat and lon points id = Dataset(nick_grid, 'r') nick_lat = id.variables['lat'][:, :] nick_lon = id.variables['lon'][:, :] id.close() # Set up arrays for interpolated melt rate and surface temp melt_reg = ma.empty(shape(nick_lat)) temp_reg = ma.empty(shape(nick_lat)) # Fill with NaNs melt_reg[:, :] = NaN temp_reg[:, :] = NaN # Build FESOM mesh elements = fesom_grid(mesh_path, circumpolar=True, cross_180=True) # Read melt rate and temperature on FESOM mesh id = Dataset(melt_file, 'r') melt_nodes = mean(id.variables['wnet'][:, :], axis=0) id.close() id = Dataset(temp_file, 'r') temp_nodes = mean(id.variables['temp'][:, :], axis=0) id.close() # Loop over all cavity elements for elm in elements: if elm.cavity: # Find all grid points which may fall within this triangle tmp = where( (nick_lat >= amin(elm.lat)) * (nick_lat <= amax(elm.lat)) * (nick_lon >= amin(elm.lon)) * (nick_lon <= amax(elm.lon))) j_vals = tmp[0] i_vals = tmp[1] # Loop over each such grid point for point in range(len(j_vals)): j = j_vals[point] i = i_vals[point] lon0 = nick_lon[j, i] lat0 = nick_lat[j, i] if in_triangle(elm, lon0, lat0): # This point does fall in the triangle # Get area of entire triangle area = triangle_area(elm.lon, elm.lat) # Get area of each sub-triangle formed by (lon0, lat0) area0 = triangle_area([lon0, elm.lon[1], elm.lon[2]], [lat0, elm.lat[1], elm.lat[2]]) area1 = triangle_area([lon0, elm.lon[0], elm.lon[2]], [lat0, elm.lat[0], elm.lat[2]]) area2 = triangle_area([lon0, elm.lon[0], elm.lon[1]], [lat0, elm.lat[0], elm.lat[1]]) # Find fractional area of each cff = [area0 / area, area1 / area, area2 / area] # Find melt rate and temperature at each node melt_vals = [] temp_vals = [] for n in range(3): melt_vals.append(melt_nodes[elm.nodes[n].id]) # This is implicitly surface temp temp_vals.append(temp_nodes[elm.nodes[n].id]) # Barycentric interpolation to lon0, lat0 melt_reg[j, i] = sum(array(cff) * array(melt_vals)) temp_reg[j, i] = sum(array(cff) * array(temp_vals)) # Mask NaNs melt_reg = ma.masked_where(isnan(melt_reg), melt_reg) temp_reg = ma.masked_where(isnan(temp_reg), temp_reg) # Conversions # m/s to mm/s melt_reg *= 1e3 # C to K temp_reg += 273.15 # Output to NetCDF file id = Dataset(out_file, 'w') id.createDimension('y', size(nick_lat, 0)) id.createDimension('x', size(nick_lat, 1)) id.createDimension('time', None) id.createVariable('longitude', 'f8', ('y', 'x')) id.variables['longitude'][:, :] = nick_lon id.createVariable('latitude', 'f8', ('y', 'x')) id.variables['latitude'][:, :] = nick_lat id.createVariable('melt', 'f8', ('y', 'x')) id.variables['melt'].units = 'mm/s' id.variables['melt'][:, :] = melt_reg id.createVariable('temp', 'f8', ('y', 'x')) id.variables['temp'].units = 'K' id.variables['temp'][:, :] = temp_reg id.close()
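# Both this function and wind_stress_curl() interpolate with barycentric
# weights: each weight is the fractional area of the sub-triangle
# opposite the corresponding node. A self-contained sketch, with a
# shoelace-formula area calculation standing in for the triangle_area
# helper used above (an assumption about its implementation):
def triangle_area_sketch(x, y):
    # Area of the triangle with vertices (x[0],y[0]), (x[1],y[1]), (x[2],y[2])
    return 0.5*abs((x[1]-x[0])*(y[2]-y[0]) - (x[2]-x[0])*(y[1]-y[0]))

def barycentric_weights(x, y, x0, y0):
    # Fractional areas of the three sub-triangles formed by (x0, y0);
    # inside the triangle the weights are non-negative and sum to 1
    area = triangle_area_sketch(x, y)
    w0 = triangle_area_sketch([x0, x[1], x[2]], [y0, y[1], y[2]])/area
    w1 = triangle_area_sketch([x0, x[0], x[2]], [y0, y[0], y[2]])/area
    w2 = triangle_area_sketch([x0, x[0], x[1]], [y0, y[0], y[1]])/area
    return [w0, w1, w2]

# The interpolated value is then sum(w[n]*value_at_node[n] for n in
# range(3)), exactly as in the loops above.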
def timeseries_watermass_sectors(mesh_path, output_path, start_year, end_year, log_file, fig_dir=''): # Titles and figure names for each sector sector_names = [ 'Filchner-Ronne Ice Shelf Cavity', 'Eastern Weddell Region Cavities', 'Amery Ice Shelf Cavity', 'Australian Sector Cavities', 'Ross Sea Cavities', 'Amundsen Sea Cavities', 'Bellingshausen Sea Cavities', 'Larsen Ice Shelf Cavities', 'All Ice Shelf Cavities' ] fig_names = [ 'filchner_ronne_watermass.png', 'eweddell_watermass.png', 'amery_watermass.png', 'australian_watermass.png', 'ross_watermass.png', 'amundsen_watermass.png', 'bellingshausen_watermass.png', 'larsen_watermass.png', 'total_antarctica_watermass.png' ] num_sectors = len(sector_names) # Water masses to consider wm_names = ['ISW', 'HSSW', 'LSSW', 'AASW', 'MCDW', 'CDW'] num_watermasses = len(wm_names) wm_colours = ['cyan', 'black', 'blue', 'green', 'magenta', 'red'] # Only consider elements south of 30S circumpolar = True # Don't make second copies of elements that cross 180E cross_180 = False # Naming conventions for FESOM output files file_head = output_path + 'MK44005.' file_tail = '.oce.mean.nc' num_years = end_year - start_year + 1 prev_years = 0 # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' # First just figure out how many years are in the log file f = open(log_file, 'r') f.readline() for line in f: try: tmp = float(line) prev_years += 1 except (ValueError): break f.close() # Now set up array of water mass proportions in each sector percent_watermass = empty( [num_watermasses, num_sectors, prev_years + num_years]) # Fill the first prev_years f = open(log_file, 'r') f.readline() wm_key = 0 while wm_key < num_watermasses: sector = 0 while sector < num_sectors: year = 0 for line in f: try: percent_watermass[wm_key, sector, year] = float(line) year += 1 except (ValueError): break sector += 1 wm_key += 1 f.close() else: # Set up empty array for water mass proportions percent_watermass = empty([num_watermasses, num_sectors, num_years]) print 'Building grid' elements = fesom_grid(mesh_path, circumpolar, cross_180) print 'Categorising elements into sectors' location_flag = zeros([num_sectors, len(elements)]) for i in range(len(elements)): elm = elements[i] # Make sure we're actually in an ice shelf cavity if elm.cavity: # Figure out which sector this ice shelf element falls into lon = mean(elm.lon) lat = mean(elm.lat) if lon >= -85 and lon < -30 and lat < -74: # Filchner-Ronne location_flag[0, i] = 1 elif lon >= -30 and lon < 65: # Eastern Weddell region location_flag[1, i] = 1 elif lon >= 65 and lon < 76: # Amery location_flag[2, i] = 1 elif lon >= 76 and lon < 165 and lat >= -74: # Australian sector location_flag[3, i] = 1 elif (lon >= 155 and lon < 165 and lat < -74) or (lon >= 165) or (lon < -140): # Ross Sea location_flag[4, i] = 1 elif (lon >= -140 and lon < -105) or (lon >= -105 and lon < -98 and lat < -73.1): # Amundsen Sea location_flag[5, i] = 1 elif (lon >= -104 and lon < -98 and lat >= -73.1) or (lon >= -98 and lon < -66 and lat >= -75): # Bellingshausen Sea location_flag[6, i] = 1 elif lon >= -66 and lon < -59 and lat >= -74: # Larsen Ice Shelves location_flag[7, i] = 1 else: print 'No region found for lon=', str(lon), ', lat=', str(lat) break #return # All ice shelf elements are in Total Antarctica location_flag[8, i] = 1 print 'Calculating water mass breakdown' # Loop over years for year in range(start_year, end_year + 1): print 'Processing year ' + str(year) # Initialise volume of each water mass in each 
sector vol_watermass = zeros([num_watermasses, num_sectors]) # Read temperature and salinity for this year, annually average id = Dataset(file_head + str(year) + file_tail, 'r') temp = mean(id.variables['temp'][:, :], axis=0) salt = mean(id.variables['salt'][:, :], axis=0) id.close() # Loop over elements for i in range(len(elements)): elm = elements[i] # Check if we're in an ice shelf cavity if elm.cavity: # Get area of 2D element area = elm.area() nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] # Loop downward while True: if nodes[0].below is None or nodes[ 1].below is None or nodes[2].below is None: # Reached the bottom break # Calculate average temperature, salinity, and # layer thickness for this 3D triangular prism temp_vals = [] salt_vals = [] dz_vals = [] for n in range(3): temp_vals.append(temp[nodes[n].id]) salt_vals.append(salt[nodes[n].id]) temp_vals.append(temp[nodes[n].below.id]) salt_vals.append(salt[nodes[n].below.id]) dz_vals.append( abs(nodes[n].depth - nodes[n].below.depth)) # Get ready for next iteration of loop nodes[n] = nodes[n].below curr_temp = mean(array(temp_vals)) curr_salt = mean(array(salt_vals)) curr_volume = area * mean(array(dz_vals)) # Get surface freezing point at this salinity curr_tfrz = -0.0575 * curr_salt + 1.7105e-3 * sqrt( curr_salt**3) - 2.155e-4 * curr_salt**2 # Figure out what water mass this is if curr_temp < curr_tfrz: # ISW wm_key = 0 elif curr_salt < 34: # AASW wm_key = 3 elif curr_temp > 0: # CDW wm_key = 5 elif curr_temp > -1.5: # MCDW wm_key = 4 elif curr_salt < 34.5: # LSSW wm_key = 2 else: # HSSW wm_key = 1 # Integrate its volume for sector(s) the element is in curr_sectors = 0 for sector in range(num_sectors): if location_flag[sector, i] == 1: curr_sectors += 1 vol_watermass[wm_key, sector] += curr_volume # Should be in exactly 2 sectors (1 + total Antarctica) if curr_sectors != 2: print 'Wrong number of sectors for element ' + str(i) if year == start_year: # Find the total volume of each sector by adding up the volume # of each water mass. Only need to do this once because it # shouldn't change over time. vol_sectors = sum(vol_watermass, axis=0) # Calculate percentage of each water mass in each sector for wm_key in range(num_watermasses): for sector in range(num_sectors): percent_watermass[wm_key, sector, year - start_year + prev_years] = vol_watermass[ wm_key, sector] / vol_sectors[sector] * 100 # Make time axis time = range(start_year - prev_years, end_year + 1) print 'Plotting' # One plot for each sector for sector in range(num_sectors): fig = figure() ax = fig.add_subplot(1, 1, 1) # Loop over water masses for wm_key in range(num_watermasses): plot(time, percent_watermass[wm_key, sector, :], color=wm_colours[wm_key], label=wm_names[wm_key], linewidth=2) xlabel('year') ylabel('percent volume') xlim([start_year - prev_years, end_year]) title(sector_names[sector]) grid(True) # Move plot over to make room for legend box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Make legend ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fig.savefig(fig_dir + fig_names[sector]) print 'Saving results to log file' f = open(log_file, 'w') for wm_key in range(num_watermasses): for sector in range(num_sectors): f.write(wm_names[wm_key] + ' in ' + sector_names[sector] + ' (%)\n') for t in range(prev_years + num_years): f.write(str(percent_watermass[wm_key, sector, t]) + '\n') f.close()
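# Example usage with hypothetical paths: process years 1996-2005 of
# MK44005.*.oce.mean.nc output, appending to any years already in the
# log file, and write the figures to the current directory:
# timeseries_watermass_sectors('../mesh/meshB/', '../output/', 1996,
#                              2005, 'watermass_sectors.log')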
def ross_plots (): # File paths mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/' forcing_file_beg = '/short/y99/kaa561/FESOM/highres_spinup/annual_avg.forcing.diag.1996.2005.nc' forcing_file_end = '/short/y99/kaa561/FESOM/rcp85_A/annual_avg.forcing.diag.2091.2100.nc' forcing_file_2094 = '/short/y99/kaa561/FESOM/rcp85_A/annual_avg.forcing.diag.2094.nc' oce_file_beg = '/short/y99/kaa561/FESOM/highres_spinup/annual_avg.oce.mean.1996.2005.nc' oce_file_end = '/short/y99/kaa561/FESOM/rcp85_A/annual_avg.oce.mean.2091.2100.nc' oce_file_2094 = '/short/y99/kaa561/FESOM/rcp85_A/annual_avg.oce.mean.2094.nc' oce2_file_beg = '/short/y99/kaa561/FESOM/highres_spinup/seasonal_climatology_oce_1996_2005.nc' oce2_file_end = '/short/y99/kaa561/FESOM/rcp85_A/seasonal_climatology_oce_2091_2100.nc' oce2_file_2094 = '/short/y99/kaa561/FESOM/rcp85_A/seasonal_climatology_oce_2094.nc' ice_file_beg = '/short/y99/kaa561/FESOM/highres_spinup/seasonal_climatology_ice_1996_2005.nc' ice_file_end = '/short/y99/kaa561/FESOM/rcp85_A/seasonal_climatology_ice_2091_2100.nc' ice_file_2094 = '/short/y99/kaa561/FESOM/rcp85_A/seasonal_climatology_ice_2094.nc' # Bounds on plot (in polar coordinate transformation) x_min = -5.5 x_max = 4 y_min = -13.8 y_max = -4.75 # Plotting parameters circumpolar = True # Season names for plot titles season_names = ['DJF', 'MAM', 'JJA', 'SON'] # Degrees to radians conversion factor deg2rad = pi/180.0 # Seconds per year sec_per_year = 365.25*24*3600 print 'Building mesh' elements = fesom_grid(mesh_path, circumpolar) # Build one set of plotting patches with all elements, one with # ice shelf cavities masked, and one with open ocean masked patches_all = [] patches_ice = [] patches_ocn = [] for elm in elements: coord = transpose(vstack((elm.x, elm.y))) patches_all.append(Polygon(coord, True, linewidth=0.)) if elm.cavity: patches_ice.append(Polygon(coord, True, linewidth=0.)) else: patches_ocn.append(Polygon(coord, True, linewidth=0.)) num_elm = len(patches_all) num_elm_ice = len(patches_ice) num_elm_ocn = len(patches_ocn) # Build ice shelf front contours contour_lines = [] for elm in elements: # Select elements where exactly 2 of the 3 nodes are in a cavity if count_nonzero(elm.cavity_nodes) == 2: # Save the coastal flags and x- and y- coordinates of these 2 coast_tmp = [] x_tmp = [] y_tmp = [] for i in range(3): if elm.cavity_nodes[i]: coast_tmp.append(elm.coast_nodes[i]) x_tmp.append(elm.x[i]) y_tmp.append(elm.y[i]) # Select elements where at most 1 of these 2 nodes are coastal if count_nonzero(coast_tmp) < 2: # Draw a line between the 2 nodes contour_lines.append([(x_tmp[0], y_tmp[0]), (x_tmp[1], y_tmp[1])]) # Set up a grey square to fill the background with land x_reg, y_reg = meshgrid(linspace(x_min, x_max, num=100), linspace(y_min, y_max, num=100)) land_square = zeros(shape(x_reg)) print 'Processing ice shelf melt rate' # Read annually averaged data, and convert from m/s to m/y id = Dataset(forcing_file_beg, 'r') wnet_nodes_beg = id.variables['wnet'][0,:]*sec_per_year id.close() id = Dataset(forcing_file_end, 'r') # Get difference from beginning wnet_nodes_end_diff = id.variables['wnet'][0,:]*sec_per_year - wnet_nodes_beg id.close() id = Dataset(forcing_file_2094, 'r') wnet_nodes_2094_diff = id.variables['wnet'][0,:]*sec_per_year - wnet_nodes_beg id.close() # Now average over each cavity element ismr_beg = [] ismr_end_diff = [] ismr_2094_diff = [] for elm in elements: if elm.cavity: ismr_beg.append(mean([wnet_nodes_beg[elm.nodes[0].id], wnet_nodes_beg[elm.nodes[1].id], 
wnet_nodes_beg[elm.nodes[2].id]])) ismr_end_diff.append(mean([wnet_nodes_end_diff[elm.nodes[0].id], wnet_nodes_end_diff[elm.nodes[1].id], wnet_nodes_end_diff[elm.nodes[2].id]])) ismr_2094_diff.append(mean([wnet_nodes_2094_diff[elm.nodes[0].id], wnet_nodes_2094_diff[elm.nodes[1].id], wnet_nodes_2094_diff[elm.nodes[2].id]])) # Figure out bounds for colour scale # Min and max of beginning # Initialise with something impossible var_min = amax(array(ismr_beg)) var_max = amin(array(ismr_beg)) # Modify as needed i = 0 for elm in elements: if elm.cavity: if any(elm.x >= x_min) and any(elm.x <= x_max) and any(elm.y >= y_min) and any(elm.y <= y_max): if ismr_beg[i] < var_min: var_min = ismr_beg[i] if ismr_beg[i] > var_max: var_max = ismr_beg[i] i += 1 # Max absolute difference diff_max = 0 i = 0 for elm in elements: if elm.cavity: if any(elm.x >= x_min) and any(elm.x <= x_max) and any(elm.y >= y_min) and any(elm.y <= y_max): if abs(ismr_end_diff[i]) > diff_max: diff_max = abs(ismr_end_diff[i]) if abs(ismr_2094_diff[i]) > diff_max: diff_max = abs(ismr_2094_diff[i]) i += 1 # Special colour map for absolute melt change_points = [0.5, 2, 3.5] if var_min < 0: # There is refreezing here; include blue for elements < 0 cmap_vals = array([var_min, 0, change_points[0], change_points[1], change_points[2], var_max]) cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)] cmap_vals_norm = (cmap_vals - var_min)/(var_max - var_min) cmap_vals_norm[-1] = 1 cmap_list = [] for i in range(size(cmap_vals)): cmap_list.append((cmap_vals_norm[i], cmap_colors[i])) mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list) else: # No refreezing cmap_vals = array([0, change_points[0], change_points[1], change_points[2], var_max]) cmap_colors = [(1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)] cmap_vals_norm = cmap_vals/var_max cmap_vals_norm[-1] = 1 cmap_list = [] for i in range(size(cmap_vals)): cmap_list.append((cmap_vals_norm[i], cmap_colors[i])) mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list) # Plot fig = figure(figsize=(22,7)) # 1996-2005 ax = fig.add_subplot(1, 3, 1, aspect='equal') # Start with land background contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) # Add ice shelf elements img = PatchCollection(patches_ice, cmap=mf_cmap) img.set_array(array(ismr_beg)) img.set_edgecolor('face') img.set_clim(vmin=var_min, vmax=var_max) ax.add_collection(img) # Mask out the open ocean in white overlay = PatchCollection(patches_ocn, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title('1996-2005', fontsize=20) # Colourbar on the left cbaxes = fig.add_axes([0.05, 0.25, 0.02, 0.5]) cbar = colorbar(img, cax=cbaxes) # 2091-2100 ax = fig.add_subplot(1, 3, 2, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_ice, cmap='RdBu_r') img.set_array(array(ismr_end_diff)) img.set_edgecolor('face') img.set_clim(vmin=-diff_max, vmax=diff_max) ax.add_collection(img) overlay = PatchCollection(patches_ocn, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title('2091-2100 anomalies', fontsize=20) # 2094 ax = fig.add_subplot(1, 3, 3, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = 
PatchCollection(patches_ice, cmap='RdBu_r') img.set_array(array(ismr_2094_diff)) img.set_edgecolor('face') img.set_clim(vmin=-diff_max, vmax=diff_max) ax.add_collection(img) overlay = PatchCollection(patches_ocn, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title('2094 anomalies', fontsize=20) # Colourbar on the right cbaxes = fig.add_axes([0.92, 0.25, 0.02, 0.5]) cbar = colorbar(img, cax=cbaxes) suptitle('Ice shelf melt rate (m/y)', fontsize=24) subplots_adjust(wspace=0.02, hspace=0.025) fig.show() fig.savefig('ross_melt.png') print 'Processing bottom water temperature' # Read annually averaged data id = Dataset(oce_file_beg, 'r') temp_nodes_beg = id.variables['temp'][0,:] id.close() id = Dataset(oce_file_end, 'r') temp_nodes_end = id.variables['temp'][0,:] id.close() id = Dataset(oce_file_2094, 'r') temp_nodes_2094 = id.variables['temp'][0,:] id.close() # Now average bottom node temperatures over each element bwtemp_beg = [] bwtemp_end = [] bwtemp_2094 = [] for elm in elements: bwtemp_beg.append(mean([temp_nodes_beg[elm.nodes[0].find_bottom().id], temp_nodes_beg[elm.nodes[1].find_bottom().id], temp_nodes_beg[elm.nodes[2].find_bottom().id]])) bwtemp_end.append(mean([temp_nodes_end[elm.nodes[0].find_bottom().id], temp_nodes_end[elm.nodes[1].find_bottom().id], temp_nodes_end[elm.nodes[2].find_bottom().id]])) bwtemp_2094.append(mean([temp_nodes_2094[elm.nodes[0].find_bottom().id], temp_nodes_2094[elm.nodes[1].find_bottom().id], temp_nodes_2094[elm.nodes[2].find_bottom().id]])) # Plot fig = figure(figsize=(22,7)) # 1996-2005 ax = fig.add_subplot(1, 3, 1, aspect='equal') # Start with land background contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) # Add all ocean elements img = PatchCollection(patches_all, cmap='jet') img.set_array(array(bwtemp_beg)) img.set_edgecolor('face') img.set_clim(vmin=-2, vmax=-0.5) ax.add_collection(img) # Contour ice shelf fronts contours = LineCollection(contour_lines, edgecolor='black', linewidth=1) ax.add_collection(contours) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title('1996-2005', fontsize=20) # 2091-2100 ax = fig.add_subplot(1, 3, 2, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_all, cmap='jet') img.set_array(array(bwtemp_end)) img.set_edgecolor('face') img.set_clim(vmin=-2, vmax=-0.5) ax.add_collection(img) contours = LineCollection(contour_lines, edgecolor='black', linewidth=1) ax.add_collection(contours) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title('2091-2100', fontsize=20) # 2094 ax = fig.add_subplot(1, 3, 3, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_all, cmap='jet') img.set_array(array(bwtemp_2094)) img.set_edgecolor('face') img.set_clim(vmin=-2, vmax=-0.5) ax.add_collection(img) contours = LineCollection(contour_lines, edgecolor='black', linewidth=1) ax.add_collection(contours) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title('2094', fontsize=20) # Horizontal colourbar below cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.02]) cbar = colorbar(img, orientation='horizontal', cax=cbaxes, extend='both') suptitle(r'Bottom water temperature ($^{\circ}$C)', fontsize=24) subplots_adjust(wspace=0.02, hspace=0.025) fig.show() fig.savefig('ross_bwtemp.png') print 'Processing seasonal 
SSTs' # Read seasonally averaged data id = Dataset(oce2_file_beg, 'r') sst_nodes_beg = id.variables['temp'][:,:] id.close() id = Dataset(oce2_file_end, 'r') sst_nodes_end = id.variables['temp'][:,:] id.close() id = Dataset(oce2_file_2094, 'r') sst_nodes_2094 = id.variables['temp'][:,:] id.close() # Now average surface nodes over each non-cavity element sst_beg = empty([4, num_elm_ocn]) sst_end = empty([4, num_elm_ocn]) sst_2094 = empty([4, num_elm_ocn]) i = 0 for elm in elements: if not elm.cavity: sst_beg[:,i] = (sst_nodes_beg[:,elm.nodes[0].id] + sst_nodes_beg[:,elm.nodes[1].id] + sst_nodes_beg[:,elm.nodes[2].id])/3.0 sst_end[:,i] = (sst_nodes_end[:,elm.nodes[0].id] + sst_nodes_end[:,elm.nodes[1].id] + sst_nodes_end[:,elm.nodes[2].id])/3.0 sst_2094[:,i] = (sst_nodes_2094[:,elm.nodes[0].id] + sst_nodes_2094[:,elm.nodes[1].id] + sst_nodes_2094[:,elm.nodes[2].id])/3.0 i += 1 # Plot fig = figure(figsize=(19,11)) for season in range(4): # 1996-2005 ax = fig.add_subplot(3, 4, season+1, aspect='equal') # Start with land background contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) # Add open ocean elements img = PatchCollection(patches_ocn, cmap='jet') img.set_array(sst_beg[season,:]) img.set_edgecolor('face') img.set_clim(vmin=-1.8, vmax=1.5) ax.add_collection(img) # Mask out cavities in white overlay = PatchCollection(patches_ice, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title(season_names[season], fontsize=24) if season == 0: text(x_min-1, 0.5*(y_min+y_max), '1996-2005', fontsize=20, ha='center', rotation=90) # 2091-2100 ax = fig.add_subplot(3, 4, season+5, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_ocn, cmap='jet') img.set_array(sst_end[season,:]) img.set_edgecolor('face') img.set_clim(vmin=-1.8, vmax=1.5) ax.add_collection(img) overlay = PatchCollection(patches_ice, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) if season == 0: text(x_min-1, 0.5*(y_min+y_max), '2091-2100', fontsize=20, ha='center', rotation=90) # 2094 ax = fig.add_subplot(3, 4, season+9, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_ocn, cmap='jet') img.set_array(sst_2094[season,:]) img.set_edgecolor('face') img.set_clim(vmin=-1.8, vmax=1.5) ax.add_collection(img) overlay = PatchCollection(patches_ice, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) if season == 0: text(x_min-1, 0.5*(y_min+y_max), '2094', fontsize=20, ha='center', rotation=90) if season == 3: # Colourbar below cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.02]) cbar = colorbar(img, orientation='horizontal', cax=cbaxes, extend='both') suptitle(r'Sea surface temperature ($^{\circ}$C)', fontsize=24) subplots_adjust(wspace=0.025, hspace=0.025) fig.show() fig.savefig('ross_sst.png') print 'Processing seasonal sea ice concentration' # Read seasonally averaged data id = Dataset(ice_file_beg, 'r') aice_nodes_beg = id.variables['area'][:,:] id.close() id = Dataset(ice_file_end, 'r') aice_nodes_end = id.variables['area'][:,:] id.close() id = Dataset(ice_file_2094, 'r') aice_nodes_2094 = id.variables['area'][:,:] id.close() # Now average nodes over each non-cavity element aice_beg = 
empty([4, num_elm_ocn]) aice_end = empty([4, num_elm_ocn]) aice_2094 = empty([4, num_elm_ocn]) i = 0 for elm in elements: if not elm.cavity: aice_beg[:,i] = (aice_nodes_beg[:,elm.nodes[0].id] + aice_nodes_beg[:,elm.nodes[1].id] + aice_nodes_beg[:,elm.nodes[2].id])/3.0 aice_end[:,i] = (aice_nodes_end[:,elm.nodes[0].id] + aice_nodes_end[:,elm.nodes[1].id] + aice_nodes_end[:,elm.nodes[2].id])/3.0 aice_2094[:,i] = (aice_nodes_2094[:,elm.nodes[0].id] + aice_nodes_2094[:,elm.nodes[1].id] + aice_nodes_2094[:,elm.nodes[2].id])/3.0 i += 1 # Plot fig = figure(figsize=(19,11)) for season in range(4): # 1996-2005 ax = fig.add_subplot(3, 4, season+1, aspect='equal') # Start with land background contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) # Add open ocean elements img = PatchCollection(patches_ocn, cmap='jet') img.set_array(aice_beg[season,:]) img.set_edgecolor('face') img.set_clim(vmin=0, vmax=1) ax.add_collection(img) # Mask out cavities in white overlay = PatchCollection(patches_ice, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) title(season_names[season], fontsize=24) if season == 0: text(x_min-1, 0.5*(y_min+y_max), '1996-2005', fontsize=20, ha='left', rotation=90) # 2091-2100 ax = fig.add_subplot(3, 4, season+5, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_ocn, cmap='jet') img.set_array(aice_end[season,:]) img.set_edgecolor('face') img.set_clim(vmin=0, vmax=1) ax.add_collection(img) overlay = PatchCollection(patches_ice, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) if season == 0: text(x_min-1, 0.5*(y_min+y_max), '2091-2100', fontsize=20, ha='left', rotation=90) # 2094 ax = fig.add_subplot(3, 4, season+9, aspect='equal') contourf(x_reg, y_reg, land_square, 1, colors=(('0.6', '0.6', '0.6'))) img = PatchCollection(patches_ocn, cmap='jet') img.set_array(aice_2094[season,:]) img.set_edgecolor('face') img.set_clim(vmin=0, vmax=1) ax.add_collection(img) overlay = PatchCollection(patches_ice, facecolor=(1,1,1)) overlay.set_edgecolor('face') ax.add_collection(overlay) xlim([x_min, x_max]) ylim([y_min, y_max]) ax.set_xticks([]) ax.set_yticks([]) if season == 0: text(x_min-1, 0.5*(y_min+y_max), '2094', fontsize=20, ha='left', rotation=90) if season == 3: # Colourbar below cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.02]) cbar = colorbar(img, orientation='horizontal', cax=cbaxes) suptitle('Sea ice concentration', fontsize=24) subplots_adjust(wspace=0.025, hspace=0.025) fig.show() fig.savefig('ross_aice.png')
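# A minimal, standalone sketch of the piecewise colormap construction used for
# the melt rates above, with illustrative (not the original) breakpoints:
from numpy import array
from matplotlib.colors import LinearSegmentedColormap
var_min, var_max = -1.0, 6.0   # assumed data range with some refreezing
cmap_vals = array([var_min, 0, 2, var_max])
cmap_colors = [(0.26, 0.45, 0.86),   # blue for refreezing
               (1, 1, 1),            # white at zero
               (1, 0.9, 0.4),        # yellow for moderate melting
               (0.5, 0.0, 0.08)]     # dark red for strong melting
# from_list needs the breakpoints normalised to [0, 1]
cmap_vals_norm = (cmap_vals - var_min)/(var_max - var_min)
cmap_vals_norm[-1] = 1
mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', list(zip(cmap_vals_norm, cmap_colors)))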
def timeseries_3D (mesh_path, ocn_file, log_file): circumpolar = True # Only consider elements south of 30S cross_180 = False # Don't make second copies of elements that cross 180E days_per_output = 5 # Number of days for each output step rhoCp = 4.2e6 # Volumetric heat capacity of seawater (J/K/m^3) C2K = 273.15 # Celsius to Kelvin conversion ohc = [] avgsalt = [] tke = [] # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' f = open(log_file, 'r') # Skip the first line (header) f.readline() for line in f: try: ohc.append(float(line)) except(ValueError): # Reached the header for the next variable break for line in f: try: avgsalt.append(float(line)) except(ValueError): break for line in f: tke.append(float(line)) f.close() print 'Building grid' elements = fesom_grid(mesh_path, circumpolar, cross_180) # Also read the depth of each node f = open(mesh_path + 'nod3d.out', 'r') f.readline() depth = [] for line in f: tmp = line.split() depth.append(float(tmp[3])) f.close() # Convert to pressure in bar press = abs(array(depth))/10.0 print 'Reading data' id = Dataset(ocn_file, 'r') num_time = id.variables['time'].shape[0] temp = id.variables['temp'][:,:] salt = id.variables['salt'][:,:] u = id.variables['u'][:,:] v = id.variables['v'][:,:] id.close() print 'Calculating density' rho = unesco(temp, salt, tile(press, (num_time,1))) print 'Setting up arrays' # First calculate volume of each element dV_e3d = [] # Loop over 2D elements for elm in elements: # Select the three nodes making up this element nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] # Calculate area of the surface triangle area = elm.area() # Loop downward through the water column while True: if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None: # We've reached the bottom break # Calculate volume as area * average depth dV_e3d.append(area*(abs(nodes[0].depth - nodes[0].below.depth) + abs(nodes[1].depth - nodes[1].below.depth) + abs(nodes[2].depth - nodes[2].below.depth))/3.0) # Update nodes for i in range(3): nodes[i] = nodes[i].below dV_e3d = array(dV_e3d) # Set up arrays for timeseries of variables at each 3D element temp_e3d = zeros([num_time,size(dV_e3d)]) salt_e3d = zeros([num_time,size(dV_e3d)]) rho_e3d = zeros([num_time,size(dV_e3d)]) u_e3d = zeros([num_time,size(dV_e3d)]) v_e3d = zeros([num_time,size(dV_e3d)]) # Loop over 2D elements again j = 0 for elm in elements: # Select the three nodes making up this element nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] # Loop downward through the water column while True: if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None: # We've reached the bottom break # Value of each variable in this triangular prism is the # average of the six vertices temp_e3d[:,j] = (temp[:,nodes[0].id] + temp[:,nodes[1].id] + temp[:,nodes[2].id] + temp[:,nodes[0].below.id] + temp[:,nodes[1].below.id] + temp[:,nodes[2].below.id])/6.0 salt_e3d[:,j] = (salt[:,nodes[0].id] + salt[:,nodes[1].id] + salt[:,nodes[2].id] + salt[:,nodes[0].below.id] + salt[:,nodes[1].below.id] + salt[:,nodes[2].below.id])/6.0 rho_e3d[:,j] = (rho[:,nodes[0].id] + rho[:,nodes[1].id] + rho[:,nodes[2].id] + rho[:,nodes[0].below.id] + rho[:,nodes[1].below.id] + rho[:,nodes[2].below.id])/6.0 u_e3d[:,j] = (u[:,nodes[0].id] + u[:,nodes[1].id] + u[:,nodes[2].id] + u[:,nodes[0].below.id] + u[:,nodes[1].below.id] + u[:,nodes[2].below.id])/6.0 v_e3d[:,j] = (v[:,nodes[0].id] + v[:,nodes[1].id] + v[:,nodes[2].id] + v[:,nodes[0].below.id] + 
v[:,nodes[1].below.id] + v[:,nodes[2].below.id])/6.0 # Update nodes for i in range(3): nodes[i] = nodes[i].below j += 1 print 'Building timeseries' for t in range(num_time): # Integrate temp*rhoCp*dV to get OHC ohc.append(sum((temp_e3d[t,:]+C2K)*rhoCp*dV_e3d)) # Average salinity (weighted with rho*dV) avgsalt.append(sum(salt_e3d[t,:]*rho_e3d[t,:]*dV_e3d)/sum(rho_e3d[t,:]*dV_e3d)) # Integrate 0.5*rho*speed^2*dV to get TKE tke.append(sum(0.5*rho_e3d[t,:]*(u_e3d[t,:]**2 + v_e3d[t,:]**2)*dV_e3d)) # Calculate time values time = arange(len(ohc))*days_per_output/365. print 'Plotting ocean heat content' clf() plot(time, ohc) xlabel('Years') ylabel('Southern Ocean Heat Content (J)') grid(True) savefig('ohc.png') print 'Plotting average salinity' clf() plot(time, avgsalt) xlabel('Years') ylabel('Southern Ocean Average Salinity (psu)') grid(True) savefig('avgsalt.png') print 'Plotting total kinetic energy' clf() plot(time, tke) xlabel('Years') ylabel('Southern Ocean Total Kinetic Energy (J)') grid(True) savefig('tke.png') print 'Saving results to log file' f = open(log_file, 'w') f.write('Southern Ocean Heat Content (J):\n') for elm in ohc: f.write(str(elm) + '\n') f.write('Southern Ocean Average Salinity (psu):\n') for elm in avgsalt: f.write(str(elm) + '\n') f.write('Southern Ocean Total Kinetic Energy (J):\n') for elm in tke: f.write(str(elm) + '\n') f.close()
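# For scale, the OHC integrand in timeseries_3D can be sanity-checked on a
# single parcel; a rough sketch with assumed dimensions:
rhoCp = 4.2e6                # volumetric heat capacity of seawater (J/K/m^3)
C2K = 273.15
temp = -1.0                  # parcel temperature (degC), assumed
dV = 1e3*1e3*100             # 1 km x 1 km x 100 m parcel (m^3)
print 'OHC of parcel: ' + str((temp + C2K)*rhoCp*dV) + ' J'   # ~1.1e17 J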
def timeseries_massloss_depth(mesh_path, diag_file, log_file, fig_dir=''): # Bounds on depth classes draft_min = array([0, 250, 500]) draft_max = array([250, 500, 3000]) num_classes = size(draft_min) # Labels for legend labels = ['<' + str(draft_max[0]) + ' m'] for n in range(1, num_classes - 1): labels.append(str(draft_min[n]) + '-' + str(draft_max[n]) + ' m') labels.append('>' + str(draft_min[-1]) + ' m') circumpolar = True # Only consider elements south of 30S cross_180 = False # Don't make second copies of elements that cross 180E days_per_output = 5 # Number of days for each output step rho_ice = 916 # Density of ice in kg/m^3 start_year = 1992 tmp_massloss = [] # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' f = open(log_file, 'r') # Skip the first line (header) f.readline() for line in f: try: tmp_massloss.append(float(line)) except (ValueError): # Reached the header for the next variable break start_t = len(tmp_massloss) # Set up array for mass loss values for each depth class old_massloss = empty([num_classes, start_t]) # Fill in the first depth class old_massloss[0, :] = tmp_massloss[:] n = 1 # Loop over the other depth classes while n < num_classes: t = 0 for line in f: try: old_massloss[n, t] = float(line) t += 1 except (ValueError): # Reached the header for the next depth class break n += 1 else: start_t = 0 print 'Building grid' elements = fesom_grid(mesh_path, circumpolar, cross_180) print 'Reading data' id = Dataset(diag_file, 'r') num_time = id.variables['time'].shape[0] # Set up array of mass loss values massloss = empty([num_classes, start_t + num_time]) if exists(log_file): # Fill first start_t timesteps with existing values massloss[:, 0:start_t] = old_massloss[:, :] # Read melt rate and convert from m/s to m/y ismr = id.variables['wnet'][:, :] * 365.25 * 24 * 60 * 60 id.close() print 'Setting up arrays' # Melt rate timeseries at each element ismr_elm = zeros([num_time, len(elements)]) # Area of each element area_elm = zeros(len(elements)) # Flag to indicate which depth class the element is part of class_flag = zeros([num_classes, len(elements)]) # Loop over each element to fill these in for i in range(len(elements)): elm = elements[i] # Make sure we're actually in an ice shelf cavity if elm.cavity: # Average ice shelf melt rate timeseries over 3 component nodes ismr_elm[:, i] = (ismr[:, elm.nodes[0].id] + ismr[:, elm.nodes[1].id] + ismr[:, elm.nodes[2].id]) / 3 # Call area function area_elm[i] = elm.area() # Get ice shelf draft (average depth of surface nodes) draft = mean( array([(elm.nodes[0]).depth, (elm.nodes[1]).depth, (elm.nodes[2]).depth])) # Loop over depth classes found = False for n in range(num_classes): # Figure out whether or not this element is part of the given depth class if draft > draft_min[n] and draft <= draft_max[n]: found = True class_flag[n, i] = 1 if not found: print "Couldn't find a depth class for ice shelf draft " + str( draft) return # Calculate conversion factors from mass loss to area-averaged melt rate # for each depth class factors = empty(num_classes) for n in range(num_classes): # Calculate total ice shelf area in this class tmp_area = sum(area_elm * class_flag[n, :]) print 'Area of ice shelf draft between ' + str( draft_min[n]) + ' and ' + str( draft_max[n]) + 'm: ' + str(tmp_area) + ' m^2' factors[n] = 1e12 / (rho_ice * tmp_area) # Build timeseries for t in range(num_time): # Loop over depth classes for n in range(num_classes): # Integrate ice shelf melt rate over area to get volume 
loss volumeloss = sum(ismr_elm[t, :] * area_elm * class_flag[n, :]) # Convert to massloss in Gt/y massloss[n, start_t + t] = 1e-12 * rho_ice * volumeloss # Calculate time values time = arange(size(massloss, 1)) * days_per_output / 365. + start_year print "Plotting" # Start with mass loss fig, ax = subplots(figsize=(10, 6)) # One line for each depth class for n in range(num_classes): ax.plot(time, massloss[n, :], label=labels[n], linewidth=2) # Configure plot title('Basal Mass Loss', fontsize=18) xlabel('Year', fontsize=14) ylabel('Gt/y', fontsize=14) xlim([time[0], time[-1]]) grid(True) # Move the plot over to make room for legend box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Make legend ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fig.savefig(fig_dir + 'massloss_depth.png') # Repeat for average melt rate fig, ax = subplots(figsize=(10, 6)) for n in range(num_classes): ax.plot(time, massloss[n, :] * factors[n], label=labels[n], linewidth=2) # Configure plot title('Area-Averaged Ice Shelf Melt Rate', fontsize=18) xlabel('Year', fontsize=14) ylabel('m/y', fontsize=14) grid(True) # Move the plot over to make room for legend box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Make legend ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fig.savefig(fig_dir + 'ismr_depth.png') print 'Saving results to log file' f = open(log_file, 'w') f.write('Basal Mass Loss for ice shelf drafts <' + str(draft_max[0]) + ' m:\n') for t in range(size(time)): f.write(str(massloss[0, t]) + '\n') for n in range(1, num_classes - 1): f.write('Basal Mass Loss for ice shelf drafts ' + str(draft_min[n]) + '-' + str(draft_max[n]) + ' m:\n') for t in range(size(time)): f.write(str(massloss[n, t]) + '\n') f.write('Basal Mass Loss for ice shelf drafts >' + str(draft_min[-1]) + 'm:\n') for t in range(size(time)): f.write(str(massloss[-1, t]) + '\n') f.close()
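# Unit check for the conversions in timeseries_massloss_depth: melt rate (m/y)
# times area (m^2) is volume loss (m^3/y); times rho_ice (kg/m^3) is kg/y;
# times 1e-12 is Gt/y. The "factors" array is simply the inverse, recovering
# an area-averaged melt rate in m/y. Assumed numbers:
rho_ice = 916                # kg/m^3
ismr = 1.0                   # area-averaged melt rate (m/y), assumed
area = 4e11                  # ice shelf area (m^2), assumed (~Ross-sized)
massloss = 1e-12*rho_ice*ismr*area
print str(massloss) + ' Gt/y'                         # 366.4 Gt/y
print str(massloss*1e12/(rho_ice*area)) + ' m/y'      # back to 1.0 m/y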
def timeseries_dpt (mesh_path, ocn_file, log_file): circumpolar = False # Don't transform x and y coordinates, we need them! cross_180 = False # Don't make second copies of elements that cross 180E days_per_output = 5 # Number of days for each output step # Longitude of Drake Passage zonal slice lon0 = -67 # Latitude bounds on Drake Passage zonal slice lat_min = -68 lat_max = -54.5 dpt = [] # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' f = open(log_file, 'r') # Skip the first line (header) f.readline() for line in f: dpt.append(float(line)) f.close() print 'Building grid' # First get regular 2D elements elm2D = fesom_grid(mesh_path, circumpolar, cross_180) # Read longitude and latitude of each node in order (needed for rotation) fid = open(mesh_path + 'nod3d.out', 'r') fid.readline() lon = [] lat = [] for line in fid: tmp = line.split() lon_tmp = float(tmp[1]) lat_tmp = float(tmp[2]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 lon.append(lon_tmp) lat.append(lat_tmp) fid.close() lon = array(lon) lat = array(lat) print 'Reading data' id = Dataset(ocn_file, 'r') num_time = id.variables['time'].shape[0] # Read both u and v so we can rotate to get the real u u_r = id.variables['u'][:,:] v_r = id.variables['v'][:,:] id.close() print 'Unrotating velocity vector' u = zeros(shape(u_r)) # Rotate one time index at a time for t in range(num_time): u_tmp, v_tmp = unrotate_vector(lon, lat, u_r[t,:], v_r[t,:]) u[t,:] = u_tmp print 'Extracting zonal slice through Drake Passage' # Get quadrilateral elements in the latitude vs depth slice selements = fesom_sidegrid(elm2D, u, lon0, lat_max, lat_min) print 'Setting up arrays' # Eastward velocity at each element u_selm = zeros([num_time, len(selements)]) # Area of each element area_selm = zeros(len(selements)) # Loop over elements to fill these in for i in range(len(selements)): selm = selements[i] u_selm[:,i] = selm.var area_selm[i] = selm.area() # Build timeseries for t in range(num_time): # Integrate u*area and convert to Sv dpt.append(sum(u_selm[t,:]*area_selm)*1e-6) # Calculate time values time = arange(len(dpt))*days_per_output/365. print 'Plotting' clf() plot(time, dpt) xlabel('Years') ylabel('Drake Passage Transport (Sv)') grid(True) savefig('drakepsgtrans.png') print 'Saving results to log file' f = open(log_file, 'w') f.write('Drake Passage Transport (Sv):\n') for elm in dpt: f.write(str(elm) + '\n') f.close()
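# Unit check for the transport integral in timeseries_dpt: u (m/s) times
# cross-sectional area (m^2) is m^3/s, and 1e-6 converts to Sverdrups.
# Assumed numbers, roughly matching the observed Drake Passage transport:
u_avg = 0.1                  # mean eastward velocity (m/s), assumed
area = 1.5e9                 # slice ~1500 km wide x 1 km deep (m^2), assumed
print str(u_avg*area*1e-6) + ' Sv'   # 150 Sv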
def cavity_watermass_distribution(): # Path to mesh directory mesh_path = '/short/y99/kaa561/FESOM/mesh/high_res/' # File containing temperature and salinity averaged over rep3 1992-2005 ts_file = '/short/y99/kaa561/FESOM/highres_spinup/rep3/annual_avg.oce.mean.nc' # Number of temperature and salinity bins num_bins = 1000 # Mesh parameters circumpolar = True cross_180 = False # Bounds on temperature and salinity bins (pre-computed, change if needed) min_salt = 32.8 max_salt = 35 min_temp = -3 max_temp = 0.2 # Read temperature and salinity at each 3D node id = Dataset(ts_file, 'r') temp = id.variables['temp'][0, :] salt = id.variables['salt'][0, :] id.close() # Calculate boundaries of temperature bins temp_bins = linspace(min_temp, max_temp, num=num_bins) # Calculate centres of temperature bins (for plotting) temp_centres = 0.5 * (temp_bins[:-1] + temp_bins[1:]) # Repeat for salinity salt_bins = linspace(min_salt, max_salt, num=num_bins) salt_centres = 0.5 * (salt_bins[:-1] + salt_bins[1:]) # Set up a 2D array of temperature bins x salinity bins to increment with # volume of water masses ts_vals = zeros([size(temp_centres), size(salt_centres)]) # Calculate surface freezing point as a function of salinity: this is the # equation the FESOM sea ice code uses freezing_pt = -0.0575 * salt_centres + 1.7105e-3 * sqrt( salt_centres**3) - 2.155e-4 * salt_centres**2 # Make FESOM mesh elements elements = fesom_grid(mesh_path, circumpolar, cross_180) # Loop over elements for elm in elements: # Only consider ice shelf cavities if elm.cavity: # Get area of 2D triangle area = elm.area() nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] # Loop downward while True: if nodes[0].below is None or nodes[1].below is None or nodes[ 2].below is None: # We've reached the bottom break # Calculate average temperature, salinity, and layer thickness # over this 3D triangular prism temp_vals = [] salt_vals = [] dz = [] for i in range(3): # Average temperature over 6 nodes temp_vals.append(temp[nodes[i].id]) temp_vals.append(temp[nodes[i].below.id]) # Average salinity over 6 nodes salt_vals.append(salt[nodes[i].id]) salt_vals.append(salt[nodes[i].below.id]) # Average dz over 3 vertical edges dz.append(abs(nodes[i].depth - nodes[i].below.depth)) # Get ready for next repetition of loop nodes[i] = nodes[i].below temp_elm = mean(array(temp_vals)) salt_elm = mean(array(salt_vals)) # Calculate volume of 3D triangular prism volume = area * mean(array(dz)) # Figure out which bins this falls into temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1 salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1 # Increment bins with volume ts_vals[temp_index, salt_index] += volume # Mask bins with zero volume ts_vals = ma.masked_where(ts_vals == 0, ts_vals) # Plot fig = figure(figsize=(8, 6)) ax = fig.add_subplot(1, 1, 1) # Plot log of volume img = pcolor(salt_centres, temp_centres, log(ts_vals), cmap='jet') # Add surface freezing point line plot(salt_centres, freezing_pt, color='black', linestyle='dashed', linewidth=2) # Add dividing line at 34 psu tmp = -0.0575 * 34 + 1.7105e-3 * sqrt(34**3) - 2.155e-4 * 34**2 plot([34, 34], [tmp, 0.2], color='black', linestyle='dashed', linewidth=2) # Add dividing line at 34.5 psu tmp = -0.0575 * 34.5 + 1.7105e-3 * sqrt(34.5**3) - 2.155e-4 * 34.5**2 plot([34.5, 34.5], [tmp, -1], color='black', linestyle='dashed', linewidth=2) # Add dividing line at -1 C plot([34, 35], [-1, -1], color='black', linestyle='dashed', linewidth=2) # Label water masses text(33.25, -2.5, 'ISW', fontsize=20) text(33.5, -0.25, 
'AASW', fontsize=20) text(34.12, -1.2, 'WW', fontsize=20) text(34.66, -1.2, 'HSSW', fontsize=20) text(34.5, -0.5, 'MCDW', fontsize=20) # Configure plot xlim([33, max_salt]) ylim([min_temp, max_temp]) xlabel('Salinity (psu)', fontsize=14) ylabel(r'Temperature ($^{\circ}$C)', fontsize=14) title('T-S distribution in ice shelf cavities, 1992-2005', fontsize=18) colorbar(img) # Label colourbar units text(35.5, -1, 'log of volume', fontsize=16, rotation=90) fig.show() fig.savefig('watermass_key.png')
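# The bin lookup in cavity_watermass_distribution, nonzero(bins > val)[0][0] - 1,
# scans the whole array; numpy's searchsorted finds the identical index by
# binary search. A quick equivalence check:
from numpy import linspace, nonzero, searchsorted
bins = linspace(-3, 0.2, num=1000)
val = -1.7
index_scan = nonzero(bins > val)[0][0] - 1
index_fast = searchsorted(bins, val, side='right') - 1
assert index_scan == index_fast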
def zonal_cavity_ts_rcp(mesh_path, spinup_path, rcp_path, fig_dir=''): file_name_beg = spinup_path + 'annual_avg.oce.mean.1996.2005.nc' file_name_end = rcp_path + 'annual_avg.oce.mean.2091.2100.nc' # Name of each ice shelf shelf_names = [ 'Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf' ] # Beginnings of filenames for figures fig_heads = [ 'larsen_d', 'larsen_c', 'wilkins_georgevi_stange', 'ronne_filchner', 'abbot', 'pig', 'thwaites', 'dotson', 'getz', 'nickerson', 'sulzberger', 'mertz', 'totten_moscowuni', 'shackleton', 'west', 'amery', 'prince_harald', 'baudouin_borchgrevink', 'lazarev', 'nivl', 'fimbul_jelbart_ekstrom', 'brunt_riiser_larsen', 'ross' ] # Longitudes intersecting each ice shelf lon0 = [ -60, -62, -68, -55, -93, -101, -106, -113, -120, -145, -150, 145, 116, 96, 85, 71, 36, 25, 15, 11, -1, -20, 180 ] # Latitude bounds for each ice shelf lat_min = [ -73.1, -69.35, -73.1, -82.6, -73.28, -75.4, -75.5, -75, -74.9, -75.9, -77.8, -67.7, -67.17, -66.67, -67.25, -72, -69.7, -71, -70.4, -70.75, -71.83, -75.6, -84.6 ] lat_max = [ -72, -66.13, -70, -75.5, -72.3, -74.4, -74.67, -74, -73.5, -75.3, -76.41, -67, -66.5, -64.83, -66.25, -68.5, -68.7, -69.9, -69.33, -69.83, -69.33, -72.9, -77 ] num_shelves = len(shelf_names) print 'Building FESOM mesh' elm2D = fesom_grid(mesh_path) print 'Reading temperature and salinity data' id = Dataset(file_name_beg, 'r') temp_nodes_beg = id.variables['temp'][0, :] salt_nodes_beg = id.variables['salt'][0, :] id.close() id = Dataset(file_name_end, 'r') temp_nodes_end = id.variables['temp'][0, :] salt_nodes_end = id.variables['salt'][0, :] id.close() temp_nodes_diff = temp_nodes_end - temp_nodes_beg salt_nodes_diff = salt_nodes_end - salt_nodes_beg # Loop over ice shelves for index in range(num_shelves): print 'Processing ' + shelf_names[index] # Figure out what to write on the title about longitude if lon0[index] < 0: lon_string = ' (' + str(-lon0[index]) + r'$^{\circ}$W)' else: lon_string = ' (' + str(lon0[index]) + r'$^{\circ}$E)' # Build arrays of SideElements making up zonal slices selements_temp_beg = fesom_sidegrid(elm2D, temp_nodes_beg, lon0[index], lat_max[index]) selements_salt_beg = fesom_sidegrid(elm2D, salt_nodes_beg, lon0[index], lat_max[index]) selements_temp_end = fesom_sidegrid(elm2D, temp_nodes_end, lon0[index], lat_max[index]) selements_salt_end = fesom_sidegrid(elm2D, salt_nodes_end, lon0[index], lat_max[index]) selements_temp_diff = fesom_sidegrid(elm2D, temp_nodes_diff, lon0[index], lat_max[index]) selements_salt_diff = fesom_sidegrid(elm2D, salt_nodes_diff, lon0[index], lat_max[index]) # Build array of quadrilateral patches for the plots, and data values # corresponding to each SideElement patches = [] temp_beg = [] for selm in selements_temp_beg: # Make patch coord = transpose(vstack((selm.y, selm.z))) patches.append(Polygon(coord, True, linewidth=0.)) # Save data value temp_beg.append(selm.var) temp_beg = array(temp_beg) # Other variables have same patches but different values salt_beg = [] for 
selm in selements_salt_beg: salt_beg.append(selm.var) salt_beg = array(salt_beg) temp_end = [] for selm in selements_temp_end: temp_end.append(selm.var) temp_end = array(temp_end) salt_end = [] for selm in selements_salt_end: salt_end.append(selm.var) salt_end = array(salt_end) temp_diff = [] for selm in selements_temp_diff: temp_diff.append(selm.var) temp_diff = array(temp_diff) salt_diff = [] for selm in selements_salt_diff: salt_diff.append(selm.var) salt_diff = array(salt_diff) # Find bounds on each variable temp_min = min(amin(temp_beg), amin(temp_end)) temp_max = max(amax(temp_beg), amax(temp_end)) temp_max_diff = amax(abs(temp_diff)) salt_min = min(amin(salt_beg), amin(salt_end)) salt_max = max(amax(salt_beg), amax(salt_end)) salt_max_diff = amax(abs(salt_diff)) # Find deepest depth depth_min = 0 for selm in selements_temp_beg: depth_min = min(depth_min, amin(selm.z)) # Round down to nearest 50 metres depth_min = floor(depth_min / 50) * 50 # Plot fig = figure(figsize=(24, 12)) # Temperature (beginning) ax = fig.add_subplot(2, 3, 1) img = PatchCollection(patches, cmap='jet') img.set_array(temp_beg) img.set_edgecolor('face') img.set_clim(vmin=temp_min, vmax=temp_max) ax.add_collection(img) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) title(r'Temperature ($^{\circ}$C), 1996-2005', fontsize=20) ylabel('Depth (m)', fontsize=16) # Add colorbar for absolute temperature cbaxes_temp = fig.add_axes([0.05, 0.575, 0.01, 0.3]) cbar_temp = colorbar(img, cax=cbaxes_temp) cbar_temp.ax.tick_params(labelsize=16) # Temperature (end) ax = fig.add_subplot(2, 3, 2) img = PatchCollection(patches, cmap='jet') img.set_array(temp_end) img.set_edgecolor('face') img.set_clim(vmin=temp_min, vmax=temp_max) ax.add_collection(img) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) title(r'Temperature ($^{\circ}$C), 2091-2100', fontsize=20) ylabel('Depth (m)', fontsize=16) # Temperature (difference) ax = fig.add_subplot(2, 3, 3) img = PatchCollection(patches, cmap='RdBu_r') img.set_array(temp_diff) img.set_edgecolor('face') img.set_clim(vmin=-temp_max_diff, vmax=temp_max_diff) ax.add_collection(img) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) title(r'Temperature ($^{\circ}$C), change', fontsize=20) ylabel('Depth (m)', fontsize=16) # Add colorbar for temperature difference cbaxes_dtemp = fig.add_axes([0.92, 0.575, 0.01, 0.3]) cbar_dtemp = colorbar(img, cax=cbaxes_dtemp) cbar_dtemp.ax.tick_params(labelsize=16) # Salinity (beginning) ax = fig.add_subplot(2, 3, 4) img = PatchCollection(patches, cmap='jet') img.set_array(salt_beg) img.set_edgecolor('face') img.set_clim(vmin=salt_min, vmax=salt_max) ax.add_collection(img) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) title('Salinity (psu), 1996-2005', fontsize=20) ylabel('Depth (m)', fontsize=16) # Add colorbar for absolute salinity cbaxes_salt = fig.add_axes([0.05, 0.125, 0.01, 0.3]) cbar_salt = colorbar(img, cax=cbaxes_salt) cbar_salt.ax.tick_params(labelsize=16) # Salinity (end) ax = fig.add_subplot(2, 3, 5) img = PatchCollection(patches, cmap='jet') img.set_array(salt_end) img.set_edgecolor('face') img.set_clim(vmin=salt_min, vmax=salt_max) ax.add_collection(img) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) title('Salinity (psu), 2091-2100', fontsize=20) ylabel('Depth (m)', fontsize=16) # Salinity (difference) ax = fig.add_subplot(2, 3, 6) img = PatchCollection(patches, cmap='RdBu_r') img.set_array(salt_diff) img.set_edgecolor('face') img.set_clim(vmin=-salt_max_diff, vmax=salt_max_diff)
ax.add_collection(img) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) title('Salinity (psu), change', fontsize=20) ylabel('Depth (m)', fontsize=16) # Add colorbar for salinity difference cbaxes_dsalt = fig.add_axes([0.92, 0.125, 0.01, 0.3]) cbar_dsalt = colorbar(img, cax=cbaxes_dsalt) cbar_dsalt.ax.tick_params(labelsize=16) # Main title suptitle(shelf_names[index] + lon_string, fontsize=28) #fig.show() fig.savefig(fig_dir + fig_heads[index] + '_zonal_ts.png')
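# Hypothetical usage of zonal_cavity_ts_rcp; all paths are placeholders:
zonal_cavity_ts_rcp('/path/to/mesh/', '/path/to/highres_spinup/',
                    '/path/to/rcp85_M/', fig_dir='figures/')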
def moc_lat_density(mesh_path, file_path, save=False, fig_name=None): # Options for grid objects circumpolar = False cross_180 = False # Read vertical velocity, temperature, and salinity at every node id = Dataset(file_path, 'r') w = mean(id.variables['w'][:, :], axis=0) temp = mean(id.variables['temp'][:, :], axis=0) salt = mean(id.variables['salt'][:, :], axis=0) id.close() # Calculate potential density (depth 0) at every node density = unesco(temp, salt, zeros(shape(temp))) - 1000 # Build FESOM grid elements = fesom_grid(mesh_path, circumpolar, cross_180) # Set up arrays of vertical transport, latitude, upstream density, and # downstream density at every interface between vertical layers of elements transport_all = [] lat_all = [] density_us_all = [] density_ds_all = [] # Loop over 2D elements for elm in elements: # Get area and latitude (average over 3 nodes) area = elm.area() lat = mean(elm.lat) nodes_above = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] nodes = [ nodes_above[0].below, nodes_above[1].below, nodes_above[2].below ] # Loop from the second layer from the surface, down to the second layer # from the bottom while True: nodes_below = [nodes[0].below, nodes[1].below, nodes[2].below] if None in nodes_below: # Reached the bottom break # Vertical velocity average over 3 nodes w_avg = mean([w[nodes[0].id], w[nodes[1].id], w[nodes[2].id]]) # Vertical transport through this triangular interface transport = abs(w_avg) * area * 1e-6 # Density average over 3D triangular prism above density_above = mean([ density[nodes[0].id], density[nodes[1].id], density[nodes[2].id], density[nodes_above[0].id], density[nodes_above[1].id], density[nodes_above[2].id] ]) # Density average over 3D triangular prism below density_below = mean([ density[nodes[0].id], density[nodes[1].id], density[nodes[2].id], density[nodes_below[0].id], density[nodes_below[1].id], density[nodes_below[2].id] ]) # Figure out which is triangular prism upstream and which is # downstream; save the density values correspondingly if w_avg > 0: density_us = density_below density_ds = density_above else: density_us = density_above density_ds = density_below # Save vertical transport, latitude, upstream and downstream # densities for this interface transport_all.append(transport) lat_all.append(lat) density_us_all.append(density_us) density_ds_all.append(density_ds) # Get ready for next layer down nodes_above = nodes nodes = nodes_below # Get regular values of latitude and density lat_reg = linspace(-90, 90, num=50) density_reg = linspace(floor(amin(density)), ceil(amax(density)), num=25) # Set up array for overturning streamfunction moc = zeros([size(density_reg), size(lat_reg)]) # Loop over latitude for j in range(size(lat_reg)): print 'Processing latitude ' + str(j + 1) + ' of ' + str(size(lat_reg)) # Make a flag which is 1 for interfaces south of the current latitude, # 0 otherwise flag_lat = zeros(shape(lat_all)) index = lat_all <= lat_reg[j] flag_lat[index] = 1 # Loop over density for k in range(size(density_reg)): # Make a flag which is 1 or -1 (depending on direction) for # interfaces where the upstream-downstream density gradient crosses # the current density, 0 otherwise flag_density = zeros(shape(density_us_all)) index = (density_us_all <= density_reg[k]) * (density_ds_all >= density_reg[k]) flag_density[index] = 1 index = (density_ds_all <= density_reg[k]) * (density_us_all >= density_reg[k]) flag_density[index] = -1 # Calculate MOC moc[k, j] = sum(transport_all * flag_lat * flag_density) # Make colour levels bound = 
amax(abs(moc)) lev = linspace(-bound, bound, num=50) # Plot fig = figure() img = contourf(lat_reg, density_reg, moc, lev, cmap='RdBu_r') ylim([density_reg[-1], density_reg[0]]) xlabel('Latitude') ylabel(r'Density (kg/m$^3$)') title('Meridional Overturning Streamfunction (Sv)') colorbar(img) if save: fig.savefig(fig_name) else: fig.show()
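# Toy demonstration of the density-crossing flags in moc_lat_density: an
# interface contributes +transport where the upstream-to-downstream density
# gradient crosses the target density, and -transport the other way.
from numpy import array, zeros, shape
transport_all = array([1.0, 2.0, 0.5])      # transports (Sv), assumed
density_us_all = array([27.0, 27.6, 27.9])  # upstream densities, assumed
density_ds_all = array([27.5, 27.2, 28.1])  # downstream densities, assumed
density0 = 27.4
flag = zeros(shape(density_us_all))
flag[(density_us_all <= density0)*(density_ds_all >= density0)] = 1
flag[(density_ds_all <= density0)*(density_us_all >= density0)] = -1
print sum(transport_all*flag)   # 1.0 - 2.0 + 0 = -1.0 Sv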
def fesom_intersectgrid(mesh_path, file_path, var_name, tstep, lon_min, lon_max, lat_min, lat_max, depth_min, depth_max, num_lat, num_depth): if lon_min == -180 and lon_max == 180: lon_bounds = False else: lon_bounds = True # Build the regular FESOM grid elements = fesom_grid(mesh_path, cross_180=False) # Read data id = Dataset(file_path, 'r') data = id.variables[var_name][tstep - 1, :] # Check for vector variables that need to be unrotated if var_name in ['u', 'v']: # Read the rotated lat and lon fid = open(mesh_path + 'nod3d.out', 'r') fid.readline() lon = [] lat = [] for line in fid: tmp = line.split() lon_tmp = float(tmp[1]) lat_tmp = float(tmp[2]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 lon.append(lon_tmp) lat.append(lat_tmp) fid.close() lon = array(lon) lat = array(lat) if var_name == 'u': u_data = data[:] v_data = id.variables['v'][tstep - 1, :] u_data_lonlat, v_data_lonlat = unrotate_vector( lon, lat, u_data, v_data) data = u_data_lonlat[:] elif var_name == 'v': v_data = data[:] u_data = id.variables['u'][tstep - 1, :] u_data_lonlat, v_data_lonlat = unrotate_vector( lon, lat, u_data, v_data) data = v_data_lonlat[:] id.close() # Build the regular grid lat_vals = linspace(lat_min, lat_max, num_lat) # Make depth positive to match the "depth" attribute in grid Nodes depth_vals = -1 * linspace(depth_min, depth_max, num_depth) # Set up array of NaNs to overwrite with zonally averaged data data_reg = zeros((num_depth, num_lat)) data_reg[:, :] = NaN # Process one latitude value at a time for j in range(num_lat): ielm_list = [] # Loop over 2D grid Elements for elm in elements: # Select elements which intersect the current latitude, and which # fall entirely between the longitude bounds if lon_bounds: keep = any(elm.y <= lat_vals[j]) and any( elm.y >= lat_vals[j]) and all(elm.x >= lon_min) and all( elm.x <= lon_max) else: # No bounds on longitude keep = any(elm.y <= lat_vals[j]) and any(elm.y >= lat_vals[j]) if keep: # Create an IntersectElement ielm = create_ielm(elm, lat_vals[j], depth_vals, data) # Check for cases where the Element intersected the given # latitude at exactly one corner; these aren't useful if ielm is not None: ielm_list.append(ielm) # Zonally average at each depth for k in range(num_depth): # Set up integrals of var*dx and dx int_vardx = 0 int_dx = 0 for ielm in ielm_list: # Check if data exists at the current depth level if ielm.var[k] is not NaN: int_vardx += ielm.var[k] * ielm.dx int_dx += ielm.dx if int_dx > 0: data_reg[k, j] = int_vardx / int_dx # Convert depth back to negative for plotting depth_vals = -1 * depth_vals return lat_vals, depth_vals, data_reg
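# Hypothetical usage of fesom_intersectgrid: zonally average temperature over
# 0-90E, 75S-60S, top 1000 m, onto a 100 (lat) x 50 (depth) regular grid.
lat_vals, depth_vals, data_reg = fesom_intersectgrid(
    '/path/to/mesh/', '/path/to/oce.mean.nc', 'temp', 1,
    0, 90, -75, -60, -1000, 0, 100, 50)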
if var_key == 't': var_name = 'temp' elif var_key == 's': var_name = 'salt' lon0 = float(raw_input("Enter longitude (-180 to 180): ")) depth_min = -1 * float( raw_input("Deepest depth to plot (positive, metres): ")) action = raw_input("Save figure (s) or display on screen (d)? ") if action == 's': save = True fig_name = raw_input("File name for figure: ") elif action == 'd': save = False fig_name = None # Build the FESOM mesh ahead of time elements = fesom_grid(mesh_path) sose_fesom_seasonal(elements, file_path1, file_path2, var_name, lon0, depth_min, save, fig_name) # Repeat until the user wants to exit while True: repeat = raw_input("Make another plot (y/n)? ") if repeat == 'y': while True: # Ask for changes to the input parameters; repeat until the user is finished changes = raw_input( "Enter a parameter to change: (1) file paths, (2) temperature/salinity, (3) longitude, (4) deepest depth, (5) save/display; or enter to continue: " ) if len(changes) == 0: # No more changes to parameters break
def mip_zonal_cavity_ts (roms_grid, roms_file, fesom_mesh_path_lr, fesom_file_lr, fesom_mesh_path_hr, fesom_file_hr): # Name of each ice shelf shelf_names = ['Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'] # Beginnings of filenames for figures fig_heads = ['larsen_d', 'larsen_c', 'wilkins_georgevi_stange', 'ronne_filchner', 'abbot', 'pig', 'thwaites', 'dotson', 'getz', 'nickerson', 'sulzberger', 'mertz', 'totten_moscowuni', 'shackleton', 'west', 'amery', 'prince_harald', 'baudouin_borchgrevink', 'lazarev', 'nivl', 'fimbul_jelbart_ekstrom', 'brunt_riiser_larsen', 'ross'] # Longitudes intersecting each ice shelf lon0 = [-60, -62, -68, -55, -93, -101, -106, -113, -120, -145, -150, 145, 116, 96, 85, 71, 36, 25, 15, 11, -1, -20, 180] # Latitude bounds for each ice shelf lat_min = [-73.1, -69.35, -73.1, -82.6, -73.28, -75.4, -75.5, -75, -74.9, -75.9, -77.8, -67.7, -67.17, -66.67, -67.25, -72, -69.7, -71, -70.4, -70.75, -71.83, -75.6, -84.6] lat_max = [-72, -66.13, -70, -75.5, -72.3, -74.4, -74.67, -74, -73.5, -75.3, -76.41, -67, -66.5, -64.83, -66.25, -68.5, -68.7, -69.9, -69.33, -69.83, -69.33, -72.9, -77] num_shelves = len(shelf_names) # ROMS grid parameters theta_s = 7.0 theta_b = 2.0 hc = 250 N = 31 print 'Setting up ROMS' # Start with grid id = Dataset(roms_grid, 'r') h = id.variables['h'][:,:] zice = id.variables['zice'][:,:] lon_2d = id.variables['lon_rho'][:,:] lat_2d = id.variables['lat_rho'][:,:] id.close() # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N) # Read temperature and salinity id = Dataset(roms_file, 'r') roms_temp_3d = id.variables['temp'][0,:,:,:] roms_salt_3d = id.variables['salt'][0,:,:,:] id.close() print 'Setting up low-res FESOM' # Build the regular FESOM grid elm2D_lr = fesom_grid(fesom_mesh_path_lr) # Read temperature and salinity at every node id = Dataset(fesom_file_lr, 'r') fesom_temp_nodes_lr = id.variables['temp'][0,:] fesom_salt_nodes_lr = id.variables['salt'][0,:] id.close() print 'Setting up high-res FESOM' elm2D_hr = fesom_grid(fesom_mesh_path_hr) id = Dataset(fesom_file_hr, 'r') fesom_temp_nodes_hr = id.variables['temp'][0,:] fesom_salt_nodes_hr = id.variables['salt'][0,:] id.close() # Loop over ice shelves for index in range(num_shelves): print 'Processing ' + shelf_names[index] # Figure out what to write on the title about longitude if lon0[index] < 0: lon_string = ' ('+str(-lon0[index])+r'$^{\circ}$W)' else: lon_string = ' ('+str(lon0[index])+r'$^{\circ}$E)' # MetROMS # Make sure longitude is between 0 and 360 roms_lon0 = lon0[index] if roms_lon0 < 0: roms_lon0 += 360 # Interpolate to given longitude roms_temp, roms_z, roms_lat = interp_lon_roms(roms_temp_3d, z_3d, lat_2d, lon_2d, roms_lon0) roms_salt, roms_z, roms_lat = interp_lon_roms(roms_salt_3d, z_3d, lat_2d, lon_2d, roms_lon0) # Figure out deepest depth flag = (roms_lat >= lat_min[index])*(roms_lat <= lat_max[index]) depth_min_tmp = amin(roms_z[flag]) # Round down to 
nearest 50 metres depth_min = floor(depth_min_tmp/50)*50 # FESOM low-res # Build arrays of SideElements making up zonal slices selements_temp_lr = fesom_sidegrid(elm2D_lr, fesom_temp_nodes_lr, lon0[index], lat_max[index]) selements_salt_lr = fesom_sidegrid(elm2D_lr, fesom_salt_nodes_lr, lon0[index], lat_max[index]) # Build array of quadrilateral patches for the plots, and data values # corresponding to each SideElement patches_lr = [] fesom_temp_lr = [] for selm in selements_temp_lr: # Make patch coord = transpose(vstack((selm.y, selm.z))) patches_lr.append(Polygon(coord, True, linewidth=0.)) # Save data value fesom_temp_lr.append(selm.var) fesom_temp_lr = array(fesom_temp_lr) # Salinity has same patches but different values fesom_salt_lr = [] for selm in selements_salt_lr: fesom_salt_lr.append(selm.var) fesom_salt_lr = array(fesom_salt_lr) # FESOM high-res selements_temp_hr = fesom_sidegrid(elm2D_hr, fesom_temp_nodes_hr, lon0[index], lat_max[index]) selements_salt_hr = fesom_sidegrid(elm2D_hr, fesom_salt_nodes_hr, lon0[index], lat_max[index]) patches_hr = [] fesom_temp_hr = [] for selm in selements_temp_hr: coord = transpose(vstack((selm.y, selm.z))) patches_hr.append(Polygon(coord, True, linewidth=0.)) fesom_temp_hr.append(selm.var) fesom_temp_hr = array(fesom_temp_hr) fesom_salt_hr = [] for selm in selements_salt_hr: fesom_salt_hr.append(selm.var) fesom_salt_hr = array(fesom_salt_hr) # Find bounds on each variable temp_min = amin(array([amin(roms_temp[flag]), amin(fesom_temp_lr), amin(fesom_temp_hr)])) temp_max = amax(array([amax(roms_temp[flag]), amax(fesom_temp_lr), amax(fesom_temp_hr)])) salt_min = amin(array([amin(roms_salt[flag]), amin(fesom_salt_lr), amin(fesom_salt_hr)])) salt_max = amax(array([amax(roms_salt[flag]), amax(fesom_salt_lr), amax(fesom_salt_hr)])) # Plot fig = figure(figsize=(24,12)) # MetROMS temperature ax = fig.add_subplot(2, 3, 1) pcolor(roms_lat, roms_z, roms_temp, vmin=temp_min, vmax=temp_max, cmap='jet') title(r'MetROMS temperature ($^{\circ}$C)', fontsize=20) ylabel('Depth (m)', fontsize=16) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) # FESOM low-res temperature ax = fig.add_subplot(2, 3, 2) img = PatchCollection(patches_lr, cmap='jet') img.set_array(fesom_temp_lr) img.set_edgecolor('face') img.set_clim(vmin=temp_min, vmax=temp_max) ax.add_collection(img) title(r'FESOM (low-res) temperature ($^{\circ}$C)', fontsize=20) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) # FESOM high-res temperature ax = fig.add_subplot(2, 3, 3) img = PatchCollection(patches_hr, cmap='jet') img.set_array(fesom_temp_hr) img.set_edgecolor('face') img.set_clim(vmin=temp_min, vmax=temp_max) ax.add_collection(img) title(r'FESOM (high-res) temperature ($^{\circ}$C)', fontsize=20) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) # Add colorbar for temperature cbaxes = fig.add_axes([0.92, 0.575, 0.01, 0.3]) cbar = colorbar(img, cax=cbaxes) cbar.ax.tick_params(labelsize=16) # MetROMS salinity ax = fig.add_subplot(2, 3, 4) pcolor(roms_lat, roms_z, roms_salt, vmin=salt_min, vmax=salt_max, cmap='jet') title('MetROMS salinity (psu)', fontsize=20) xlabel('Latitude', fontsize=16) ylabel('Depth (m)', fontsize=16) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) # FESOM low-res salinity ax = fig.add_subplot(2, 3, 5) img = PatchCollection(patches_lr, cmap='jet') img.set_array(fesom_salt_lr) img.set_edgecolor('face') img.set_clim(vmin=salt_min, vmax=salt_max) ax.add_collection(img) title(r'FESOM (low-res) salinity (psu)', fontsize=20) 
xlabel('Latitude', fontsize=16) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) # FESOM high-res salinity ax = fig.add_subplot(2, 3, 6) img = PatchCollection(patches_hr, cmap='jet') img.set_array(fesom_salt_hr) img.set_edgecolor('face') img.set_clim(vmin=salt_min, vmax=salt_max) ax.add_collection(img) title(r'FESOM (high-res) salinity (psu)', fontsize=20) xlabel('Latitude', fontsize=16) xlim([lat_min[index], lat_max[index]]) ylim([depth_min, 0]) # Add colorbar for salinity cbaxes = fig.add_axes([0.92, 0.125, 0.01, 0.3]) cbar = colorbar(img, cax=cbaxes) cbar.ax.tick_params(labelsize=16) # Main title suptitle(shelf_names[index] + lon_string, fontsize=28) #fig.show() fig.savefig(fig_heads[index] + '_zonal_ts.png')
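# Hypothetical usage of mip_zonal_cavity_ts; all paths are placeholders:
mip_zonal_cavity_ts('/path/to/roms_grid.nc', '/path/to/roms_avg.nc',
                    '/path/to/fesom_mesh_lr/', '/path/to/fesom_avg_lr.nc',
                    '/path/to/fesom_mesh_hr/', '/path/to/fesom_avg_hr.nc')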
def timeseries_seaice(mesh_path, ice_file, log_file, fig_dir=''): circumpolar = True # Only consider elements south of 30S cross_180 = False # Don't make second copies of elements that cross 180E days_per_output = 5 # Number of days for each output step total_area = [] total_volume = [] # Check if the log file exists if exists(log_file): print 'Reading previously calculated values' f = open(log_file, 'r') # Skip the first line (header) f.readline() for line in f: try: total_area.append(float(line)) except (ValueError): # Reached the header for the next variable break for line in f: total_volume.append(float(line)) f.close() print 'Building grid' elements = fesom_grid(mesh_path, circumpolar, cross_180) print 'Reading data' id = Dataset(ice_file, 'r') num_time = id.variables['time'].shape[0] aice = id.variables['area'][:, :] hice = id.variables['hice'][:, :] id.close() print 'Setting up arrays' # Sea ice concentration at each element aice_elm = zeros([num_time, len(elements)]) # Sea ice height at each element hice_elm = zeros([num_time, len(elements)]) # Area of each element area_elm = zeros(len(elements)) # Loop over elements to fill these in for i in range(len(elements)): elm = elements[i] # Average aice and hi over 3 component nodes aice_elm[:, i] = (aice[:, elm.nodes[0].id] + aice[:, elm.nodes[1].id] + aice[:, elm.nodes[2].id]) / 3 hice_elm[:, i] = (hice[:, elm.nodes[0].id] + hice[:, elm.nodes[1].id] + hice[:, elm.nodes[2].id]) / 3 # Call area function area_elm[i] = elm.area() # Build timeseries for t in range(num_time): # Integrate area and convert to million km^2 total_area.append(sum(aice_elm[t, :] * area_elm) * 1e-12) # Integrate volume and convert to thousand km^3 total_volume.append( sum(aice_elm[t, :] * hice_elm[t, :] * area_elm) * 1e-12) # Calculate time values time = arange(len(total_area)) * days_per_output / 365. print 'Plotting total sea ice area' clf() plot(time, total_area) xlabel('Years') ylabel(r'Total Sea Ice Area (million km$^2$)') grid(True) savefig(fig_dir + 'seaice_area.png') print 'Plotting total sea ice volume' clf() plot(time, total_volume) xlabel('Years') ylabel(r'Total Sea Ice Volume (thousand km$^3$)') grid(True) savefig(fig_dir + 'seaice_volume.png') print 'Saving results to log file' f = open(log_file, 'w') f.write('Total Sea Ice Area (million km^2):\n') for elm in total_area: f.write(str(elm) + '\n') f.write('Total Sea Ice Volume (thousand km^3):\n') for elm in total_volume: f.write(str(elm) + '\n') f.close()
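# Unit check for the integrals in timeseries_seaice: concentration times area
# (m^2) integrates to m^2, with 1e-12 giving million km^2; including thickness
# (m) gives m^3, with 1e-12 giving thousand km^3. Assumed numbers:
aice = 0.9                   # concentration, assumed
hice = 1.5                   # effective thickness (m), assumed
area = 1e10                  # element area (m^2), assumed
print str(aice*area*1e-12) + ' million km^2'          # 0.009
print str(aice*hice*area*1e-12) + ' thousand km^3'    # 0.0135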
def barotropic_streamfunction_diff(): mesh_path = '/short/y99/kaa561/FESOM/mesh/meshB/' directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/' directories = [ '/short/y99/kaa561/FESOM/rcp45_M/', '/short/y99/kaa561/FESOM/rcp45_A/', '/short/y99/kaa561/FESOM/rcp85_M/', '/short/y99/kaa561/FESOM/rcp85_A/' ] file_beg = 'annual_avg.oce.mean.1996.2005.nc' file_end = 'annual_avg.oce.mean.2091.2100.nc' num_expts = len(directories) expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A'] expt_filetails = ['rcp45_M', 'rcp45_A', 'rcp85_M', 'rcp85_A'] # Bounds on regular grid lon_min = -180 lon_max = 180 lat_min = -85 lat_max = -60 # Number of points on regular grid num_lon = 1000 num_lat = 250 # Radius of the Earth in metres r = 6.371e6 # Degrees to radians conversion factor deg2rad = pi / 180.0 print 'Building mesh' elements = fesom_grid(mesh_path, circumpolar=False, cross_180=True) # Read number of 2D nodes f = open(mesh_path + 'nod2d.out', 'r') n2d = int(f.readline()) f.close() # Read (rotated) lon, lat, and depth, at each 3D node f = open(mesh_path + 'nod3d.out', 'r') f.readline() rlon = [] rlat = [] node_depth = [] for line in f: tmp = line.split() lon_tmp = float(tmp[1]) lat_tmp = float(tmp[2]) node_depth_tmp = -1 * float(tmp[3]) if lon_tmp < -180: lon_tmp += 360 elif lon_tmp > 180: lon_tmp -= 360 rlon.append(lon_tmp) rlat.append(lat_tmp) node_depth.append(node_depth_tmp) f.close() rlon = array(rlon) rlat = array(rlat) node_depth = array(node_depth) # Read lists of which nodes are directly below which f = open(mesh_path + 'aux3d.out', 'r') max_num_layers = int(f.readline()) node_columns = zeros([n2d, max_num_layers]) for n in range(n2d): for k in range(max_num_layers): node_columns[n, k] = int(f.readline()) node_columns = node_columns.astype(int) f.close() # Set up regular grid # Start with boundaries lon_reg_edges = linspace(lon_min, lon_max, num_lon + 1) lat_reg_edges = linspace(lat_min, lat_max, num_lat + 1) # Now get centres lon_reg = 0.5 * (lon_reg_edges[:-1] + lon_reg_edges[1:]) lat_reg = 0.5 * (lat_reg_edges[:-1] + lat_reg_edges[1:]) # Also get differentials in lon-lat space dlon = lon_reg_edges[1:] - lon_reg_edges[:-1] dlat = lat_reg_edges[1:] - lat_reg_edges[:-1] # Make 2D versions lon_reg_2d, lat_reg_2d = meshgrid(lon_reg, lat_reg) dlon_2d, dlat_2d = meshgrid(dlon, dlat) # Calculate differentials in Cartesian space dx = r * cos(lat_reg_2d * deg2rad) * dlon_2d * deg2rad dy = r * dlat_2d * deg2rad print 'Reading data' print '...1996-2005' # Read 3D rotated u and v id = Dataset(directory_beg + file_beg, 'r') ur = id.variables['u'][0, :] vr = id.variables['v'][0, :] id.close() # Unrotate u, v = unrotate_vector(rlon, rlat, ur, vr) # Vertically integrate u*dz int_udz_beg = zeros(n2d) # Loop over nodes for n in range(n2d): # Loop over depth for k in range(max_num_layers - 1): if node_columns[n, k + 1] == -999: # Reached the bottom break # Trapezoidal rule top_id = node_columns[n, k] bot_id = node_columns[n, k + 1] dz = node_depth[bot_id - 1] - node_depth[top_id - 1] int_udz_beg[n] += 0.5 * (u[top_id - 1] + u[bot_id - 1]) * dz int_udz_end = zeros([num_expts, n2d]) for expt in range(num_expts): print '...'
+ expt_names[expt] id = Dataset(directories[expt] + file_end, 'r') ur = id.variables['u'][0, :] vr = id.variables['v'][0, :] id.close() u, v = unrotate_vector(rlon, rlat, ur, vr) for n in range(n2d): for k in range(max_num_layers - 1): if node_columns[n, k + 1] == -999: break top_id = node_columns[n, k] bot_id = node_columns[n, k + 1] dz = node_depth[bot_id - 1] - node_depth[top_id - 1] int_udz_end[expt, n] += 0.5 * (u[top_id - 1] + u[bot_id - 1]) * dz print 'Interpolating to regular grid' int_udz_reg_beg = zeros([num_lat, num_lon]) int_udz_reg_end = zeros([num_expts, num_lat, num_lon]) # For each element, check if a point on the regular lat-lon grid lies # within. If so, do barycentric interpolation to that point. for elm in elements: # Check if we are within domain of regular grid if amin(elm.lat) > lat_max: continue # Find largest regular longitude value west of Element tmp = nonzero(lon_reg > amin(elm.lon))[0] if len(tmp) == 0: # Element crosses the western boundary iW = 0 else: iW = tmp[0] - 1 # Find smallest regular longitude value east of Element tmp = nonzero(lon_reg > amax(elm.lon))[0] if len(tmp) == 0: # Element crosses the eastern boundary iE = num_lon else: iE = tmp[0] # Find largest regular latitude value south of Element tmp = nonzero(lat_reg > amin(elm.lat))[0] if len(tmp) == 0: # Element crosses the southern boundary jS = 0 else: jS = tmp[0] - 1 # Find smallest regular latitude value north of Element tmp = nonzero(lat_reg > amax(elm.lat))[0] if len(tmp) == 0: # Element crosses the northern boundary jN = num_lat else: jN = tmp[0] for i in range(iW + 1, iE): for j in range(jS + 1, jN): # There is a chance that the regular gridpoint at (i,j) # lies within this element lon0 = lon_reg[i] lat0 = lat_reg[j] if in_triangle(elm, lon0, lat0): # Get area of entire triangle area = triangle_area(elm.lon, elm.lat) # Get area of each sub-triangle formed by # (lon0, lat0) area0 = triangle_area([lon0, elm.lon[1], elm.lon[2]], [lat0, elm.lat[1], elm.lat[2]]) area1 = triangle_area([lon0, elm.lon[0], elm.lon[2]], [lat0, elm.lat[0], elm.lat[2]]) area2 = triangle_area([lon0, elm.lon[0], elm.lon[1]], [lat0, elm.lat[0], elm.lat[1]]) # Find fractional area of each cff = [area0 / area, area1 / area, area2 / area] # Find value of int_udz at each Node # 1996-2005 vals = [] for n in range(3): vals.append(int_udz_beg[elm.nodes[n].id]) # Barycentric interpolation to lon0, lat0 int_udz_reg_beg[j, i] = sum(array(cff) * array(vals)) # Loop over other experiments for expt in range(num_expts): vals = [] for n in range(3): vals.append(int_udz_end[expt, elm.nodes[n].id]) int_udz_reg_end[expt, j, i] = sum(array(cff) * array(vals)) # Indefinite integral from south to north of udz*dy, convert to Sv strf_beg = cumsum(int_udz_reg_beg * dy, axis=0) * 1e-6 # Apply land mask: wherever interpolated field was identically zero strf_beg = ma.masked_where(int_udz_reg_beg == 0, strf_beg) # Calculate difference for each RCP experiment strf_diff = ma.empty(shape(int_udz_reg_end)) for expt in range(num_expts): strf_end = cumsum(int_udz_reg_end[expt, :, :] * dy, axis=0) * 1e-6 strf_end = ma.masked_where(int_udz_reg_beg == 0, strf_end) strf_diff[expt, :, :] = strf_end - strf_beg print 'Plotting' print '...1996-2005' bound = amax(abs(strf_beg)) fig = figure(figsize=(10, 6)) ax = fig.add_subplot(1, 1, 1) pcolor(lon_reg, lat_reg, strf_beg, vmin=-bound, vmax=bound, cmap='RdBu_r') xlabel('Longitude') ylabel('Latitude') xlim([lon_min, lon_max]) ylim([lat_min, lat_max]) colorbar() title('Barotropic streamfunction (Sv), 1996-2005', 
fontsize=20) fig.savefig('strf_beg.png') for expt in range(num_expts): print '...' + expt_names[expt] bound = amax(abs(strf_diff[expt, :, :])) fig = figure(figsize=(10, 6)) ax = fig.add_subplot(1, 1, 1) pcolor(lon_reg, lat_reg, strf_diff[expt, :, :], vmin=-bound, vmax=bound, cmap='RdBu_r') xlabel('Longitude') ylabel('Latitude') xlim([lon_min, lon_max]) ylim([lat_min, lat_max]) colorbar() title('Barotropic streamfunction (Sv), 2091-2100 minus 1996-2005 (' + expt_names[expt] + ')', fontsize=20) fig.savefig('strf_diff_' + expt_filetails[expt] + '.png')
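# Self-contained sketch of the barycentric interpolation step in
# barotropic_streamfunction_diff; shoelace_area is a stand-in for the
# repository's triangle_area.
def shoelace_area(lon, lat):
    # Area of a triangle from its vertex coordinates (shoelace formula)
    return 0.5*abs((lon[1]-lon[0])*(lat[2]-lat[0]) - (lon[2]-lon[0])*(lat[1]-lat[0]))
elm_lon = [0.0, 1.0, 0.0]    # toy element
elm_lat = [0.0, 0.0, 1.0]
vals = [10.0, 20.0, 30.0]    # nodal values of int_udz, say
lon0, lat0 = 0.25, 0.25      # regular gridpoint inside the element
area = shoelace_area(elm_lon, elm_lat)
# Fractional area of each sub-triangle formed by (lon0, lat0)
cff = [shoelace_area([lon0, elm_lon[1], elm_lon[2]], [lat0, elm_lat[1], elm_lat[2]])/area,
       shoelace_area([lon0, elm_lon[0], elm_lon[2]], [lat0, elm_lat[0], elm_lat[2]])/area,
       shoelace_area([lon0, elm_lon[0], elm_lon[1]], [lat0, elm_lat[0], elm_lat[1]])/area]
print sum(c*v for c, v in zip(cff, vals))   # 0.5*10 + 0.25*20 + 0.25*30 = 17.5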
def rcp_ts_distribution (key=1):

    # File paths
    mesh_path = '/short/y99/kaa561/FESOM/mesh/high_res/'
    directory_beg = '/short/y99/kaa561/FESOM/highres_spinup/'
    directories = ['/short/y99/kaa561/FESOM/rcp45_M_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp45_A_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp85_M_highres/output/',
                   '/short/y99/kaa561/FESOM/rcp85_A_highres/output/',
                   '/short/y99/kaa561/FESOM/highres_spinup/']
    file_beg = 'annual_avg.oce.mean.1996.2005.nc'
    file_end = 'annual_avg.oce.mean.2091.2100.nc'
    # Titles for plotting
    expt_names = ['RCP 4.5 M', 'RCP 4.5 A', 'RCP 8.5 M', 'RCP 8.5 A', 'CONTROL']
    num_expts = len(directories)
    # Start and end years for each period
    beg_years = [1996, 2005]
    end_years = [2091, 2100]
    # Northern boundary of water masses to consider
    nbdry = -65
    # Number of temperature and salinity bins
    num_bins = 1000
    # Bounds on temperature and salinity bins (pre-computed, change if needed)
    min_salt = 32.3
    max_salt = 35.1
    min_temp = -3.1
    max_temp = 3.8
    # Bounds to actually plot
    if key == 1:
        min_salt_plot = 32.25
        max_salt_plot = 35
        min_temp_plot = -3
        max_temp_plot = 3.25
    elif key == 2:
        min_salt_plot = 34
        max_salt_plot = 35
        min_temp_plot = -2.5
        max_temp_plot = -1
    # FESOM grid generation parameters
    circumpolar = False
    cross_180 = False

    print 'Setting up bins'
    # Calculate boundaries of temperature bins
    temp_bins = linspace(min_temp, max_temp, num=num_bins)
    # Calculate centres of temperature bins (for plotting)
    temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:])
    # Repeat for salinity
    salt_bins = linspace(min_salt, max_salt, num=num_bins)
    salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:])
    # Set up 3D array of experiment x temperature bins x salinity bins to
    # hold average depth of water masses, weighted by volume
    ts_vals = zeros([num_expts+1, size(temp_centres), size(salt_centres)])
    # Also an array to integrate the volume of each bin
    volume = zeros([num_expts+1, size(temp_centres), size(salt_centres)])
    # Calculate surface freezing point as a function of salinity, as seen by
    # the sea ice model
    freezing_pt = -0.0575*salt_centres + 1.7105e-3*sqrt(salt_centres**3) - 2.155e-4*salt_centres**2
    # Get 2D versions of the temperature and salinity bins
    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
    # Calculate potential density of each combination of temperature and
    # salinity bins
    density = unesco(temp_2d, salt_2d, zeros(shape(temp_2d))) - 1000
    # Density contours to plot
    if key == 1:
        density_lev = arange(25.8, 28.4, 0.2)
    elif key == 2:
        density_lev = arange(27.2, 28.4, 0.2)

    print 'Building grid'
    elements = fesom_grid(mesh_path, circumpolar, cross_180)

    print 'Reading data'
    # 1996-2005
    id = Dataset(directory_beg + file_beg)
    n3d = id.variables['temp'].shape[1]
    temp_nodes = empty([num_expts+1, n3d])
    salt_nodes = empty([num_expts+1, n3d])
    temp_nodes[0,:] = id.variables['temp'][0,:]
    salt_nodes[0,:] = id.variables['salt'][0,:]
    id.close()
    # Loop over RCPs
    for expt in range(num_expts):
        id = Dataset(directories[expt] + file_end)
        temp_nodes[expt+1,:] = id.variables['temp'][0,:]
        salt_nodes[expt+1,:] = id.variables['salt'][0,:]
        id.close()

    print 'Binning elements'
    for elm in elements:
        # See if we're in the region of interest
        if all(elm.lat < nbdry):
            # Get area of 2D triangle
            area = elm.area()
            nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]]
            # Loop downward
            while True:
                if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None:
                    # We've reached the bottom
                    break
                # Calculate average temperature and salinity for each
                # experiment, as well as depth and layer thickness, over
                # this 3D triangular prism
                temp_vals = empty([num_expts+1, 6])
                salt_vals = empty([num_expts+1, 6])
                depth_vals = empty(6)
                dz = empty(3)
                for i in range(3):
                    # Loop over experiments
                    for expt in range(num_expts+1):
                        # Store temperature at the 6 nodes of the prism
                        temp_vals[expt,i] = temp_nodes[expt,nodes[i].id]
                        temp_vals[expt,i+3] = temp_nodes[expt,nodes[i].below.id]
                        # Same for salinity
                        salt_vals[expt,i] = salt_nodes[expt,nodes[i].id]
                        salt_vals[expt,i+3] = salt_nodes[expt,nodes[i].below.id]
                    # Store depth at the 6 nodes
                    depth_vals[i] = nodes[i].depth
                    depth_vals[i+3] = nodes[i].below.depth
                    # Layer thickness along each of the 3 vertical edges
                    dz[i] = abs(nodes[i].depth - nodes[i].below.depth)
                    # Get ready for next repetition of loop
                    nodes[i] = nodes[i].below
                temp_elm = mean(temp_vals, axis=1)
                salt_elm = mean(salt_vals, axis=1)
                depth_elm = mean(depth_vals)
                # Calculate volume of 3D triangular prism
                curr_volume = area*mean(dz)
                # Loop over experiments again
                for expt in range(num_expts+1):
                    # Figure out which bins this falls into
                    temp_index = nonzero(temp_bins > temp_elm[expt])[0][0] - 1
                    salt_index = nonzero(salt_bins > salt_elm[expt])[0][0] - 1
                    # Integrate depth*volume in this bin
                    ts_vals[expt, temp_index, salt_index] += depth_elm*curr_volume
                    volume[expt, temp_index, salt_index] += curr_volume
    # Mask bins with zero volume
    ts_vals = ma.masked_where(volume==0, ts_vals)
    volume = ma.masked_where(volume==0, volume)
    # Convert depths from integrals to volume-averages
    ts_vals /= volume

    # Find the maximum depth for plotting
    if key == 1:
        max_depth = amax(ts_vals)
    elif key == 2:
        temp_start = nonzero(temp_bins > min_temp_plot)[0][0] - 2
        temp_end = nonzero(temp_bins > max_temp_plot)[0][0]
        salt_start = nonzero(salt_bins > min_salt_plot)[0][0] - 2
        salt_end = nonzero(salt_bins > max_salt_plot)[0][0]
        max_depth = amax(ts_vals[:, temp_start:temp_end, salt_start:salt_end])
    # Make a nonlinear colour scale
    bounds = linspace(0, max_depth**(1.0/2.5), num=100)**2.5
    norm = BoundaryNorm(boundaries=bounds, ncolors=256)

    print 'Plotting'
    fig = figure(figsize=(24,6))
    gs = GridSpec(1, num_expts+1)
    gs.update(left=0.04, right=0.99, bottom=0.12, top=0.86)
    for expt in range(num_expts+1):
        ax = subplot(gs[0,expt])
        img = pcolor(salt_centres, temp_centres, ts_vals[expt,:,:], norm=norm, vmin=0, vmax=max_depth, cmap='jet')
        # Overlay the surface freezing point
        plot(salt_centres, freezing_pt, color='black', linestyle='dashed')
        # Overlay potential density contours
        cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
        clabel(cs, inline=1, fontsize=10, colors=(0.6,0.6,0.6), fmt='%1.1f')
        xlim([min_salt_plot, max_salt_plot])
        ylim([min_temp_plot, max_temp_plot])
        ax.tick_params(axis='x', labelsize=12)
        ax.tick_params(axis='y', labelsize=12)
        if expt == 0:
            xlabel('Salinity (psu)', fontsize=14)
            ylabel(r'Temperature ($^{\circ}$C)', fontsize=14)
            title(str(beg_years[0]) + '-' + str(beg_years[1]), fontsize=20)
        elif expt == 1:
            title(expt_names[expt-1] + ' (' + str(end_years[0]) + '-' + str(end_years[1]) + ')', fontsize=20)
        else:
            title(expt_names[expt-1], fontsize=20)
        if expt == num_expts:
            # Add a horizontal colourbar below
            cbaxes = fig.add_axes([0.35, 0.05, 0.3, 0.02])
            if key == 1:
                cbar = colorbar(img, cax=cbaxes, orientation='horizontal', ticks=[0,50,100,200,500,1000,2000,4000])
            elif key == 2:
                cbar = colorbar(img, cax=cbaxes, orientation='horizontal', ticks=[0,50,100,200,500,1000,2000])
            cbar.ax.tick_params(labelsize=14)
    # Add the main title
    if key == 1:
        suptitle(r'Water masses south of 65$^{\circ}$S: depth (m)', fontsize=24)
    elif key == 2:
        suptitle(r'Water masses south of 65$^{\circ}$S, zoomed into HSSW: depth (m)', fontsize=24)
    fig.show()
    if key == 1:
        fig.savefig('ts_distribution_full.png')
    elif key == 2:
        fig.savefig('ts_distribution_hssw.png')
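# A minimal standalone sketch (not part of the function above) of the
# volume-weighted T-S binning technique used in rcp_ts_distribution: each
# water parcel contributes depth*volume to its (temperature, salinity) bin,
# and dividing by the binned volume afterwards gives the volume-averaged
# depth of each water mass. All inputs here are synthetic, and the bin
# boundaries are assumed to cover the full range of the data.
from numpy import zeros, nonzero, ma

def ts_binning_sketch (temp, salt, depth, vol, temp_bins, salt_bins):
    # Accumulators over (temperature bin, salinity bin)
    depth_sum = zeros([len(temp_bins)-1, len(salt_bins)-1])
    vol_sum = zeros([len(temp_bins)-1, len(salt_bins)-1])
    for n in range(len(temp)):
        # Index of the bin containing each value: first boundary above it,
        # minus 1 (same convention as in rcp_ts_distribution)
        ti = nonzero(temp_bins > temp[n])[0][0] - 1
        si = nonzero(salt_bins > salt[n])[0][0] - 1
        # Integrate depth*volume and volume in this bin
        depth_sum[ti,si] += depth[n]*vol[n]
        vol_sum[ti,si] += vol[n]
    # Mask empty bins, then convert the integral to a volume-average
    depth_sum = ma.masked_where(vol_sum==0, depth_sum)
    vol_sum = ma.masked_where(vol_sum==0, vol_sum)
    return depth_sum/vol_sum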
def fesom_intersectgrid (mesh_path, file_path, var_name, tstep, lon_min, lon_max, lat_min, lat_max, depth_min, depth_max, num_lat, num_depth):

    # Build the FESOM grid
    elements = fesom_grid(mesh_path)

    # Read data
    id = Dataset(file_path, 'r')
    data = id.variables[var_name][tstep-1,:]
    # Check for vector variables that need to be unrotated
    if var_name in ['u', 'v']:
        # Read the rotated lat and lon
        fid = open(mesh_path + 'nod3d.out', 'r')
        fid.readline()
        lon = []
        lat = []
        for line in fid:
            tmp = line.split()
            lon_tmp = float(tmp[1])
            lat_tmp = float(tmp[2])
            if lon_tmp < -180:
                lon_tmp += 360
            elif lon_tmp > 180:
                lon_tmp -= 360
            lon.append(lon_tmp)
            lat.append(lat_tmp)
        fid.close()
        lon = array(lon)
        lat = array(lat)
        if var_name == 'u':
            u_data = data[:]
            v_data = id.variables['v'][tstep-1,:]
            u_data_lonlat, v_data_lonlat = unrotate_vector(lon, lat, u_data, v_data)
            data = u_data_lonlat[:]
        elif var_name == 'v':
            v_data = data[:]
            u_data = id.variables['u'][tstep-1,:]
            u_data_lonlat, v_data_lonlat = unrotate_vector(lon, lat, u_data, v_data)
            data = v_data_lonlat[:]
    id.close()

    # Build the regular grid
    lat_vals = linspace(lat_min, lat_max, num_lat)
    # Make depth positive to match the "depth" attribute in grid Nodes
    depth_vals = -1*linspace(depth_min, depth_max, num_depth)

    # Set up array of NaNs to overwrite with zonally averaged data
    data_reg = zeros((num_depth, num_lat))
    data_reg[:,:] = NaN

    # Process one latitude value at a time
    for j in range(num_lat):
        inodes_lat = []
        # Loop over 2D grid Elements
        for elm in elements:
            # Select elements which intersect the current latitude, and
            # which fall entirely between the longitude bounds
            if any(elm.y <= lat_vals[j]) and any(elm.y >= lat_vals[j]) and all(elm.x >= lon_min) and all(elm.x <= lon_max):
                # Special case where nodes (corners) of the element are
                # exactly at lat_vals[j]
                if any(elm.y == lat_vals[j]):
                    # If exactly one of the corners is at lat_vals[j],
                    # ignore it; this element only touches lat_vals[j] at
                    # one point
                    # If two of the corners are at lat_vals[j], an entire
                    # side of the element lies along the line lat_vals[j]
                    if count_nonzero(elm.y == lat_vals[j]) == 2:
                        # Select these two Nodes
                        index = nonzero(elm.y == lat_vals[j])[0]
                        node1 = elm.nodes[index[0]]
                        node2 = elm.nodes[index[1]]
                        # Convert to IntersectNodes and add them to
                        # inodes_lat
                        inodes_lat.append(coincide_inode(node1, depth_vals, data))
                        inodes_lat.append(coincide_inode(node2, depth_vals, data))
                    # Impossible for all three corners to be at lat_vals[j]
                else:
                    # Regular case
                    # Find the two sides of the triangular element which
                    # intersect lat_vals[j]
                    # For each such side, interpolate an IntersectNode
                    # between the two endpoint nodes, and add it to
                    # inodes_lat
                    if any(array([elm.y[0], elm.y[1]]) < lat_vals[j]) and any(array([elm.y[0], elm.y[1]]) > lat_vals[j]):
                        inodes_lat.append(interp_inode(elm.nodes[0], elm.nodes[1], lat_vals[j], depth_vals, data))
                    if any(array([elm.y[1], elm.y[2]]) < lat_vals[j]) and any(array([elm.y[1], elm.y[2]]) > lat_vals[j]):
                        inodes_lat.append(interp_inode(elm.nodes[1], elm.nodes[2], lat_vals[j], depth_vals, data))
                    if any(array([elm.y[0], elm.y[2]]) < lat_vals[j]) and any(array([elm.y[0], elm.y[2]]) > lat_vals[j]):
                        inodes_lat.append(interp_inode(elm.nodes[0], elm.nodes[2], lat_vals[j], depth_vals, data))
        # Sort inodes_lat by longitude (ascending)
        inodes_lat.sort(key=lambda inode: inode.lon)
        # Interpolate the variable values at each depth
        for k in range(num_depth):
            valid_lon = []
            valid_var = []
            for inode in inodes_lat:
                # Select all IntersectNodes where data exists at the
                # current depth level
                if not isnan(inode.var[k]):
                    # Only continue if an identical inode (same longitude)
                    # hasn't already been added to valid_lon and valid_var
                    # (this will happen for adjacent elements which share
                    # a side)
                    if inode.lon not in valid_lon:
                        # Save longitude and variable values
                        valid_lon.append(inode.lon)
                        valid_var.append(inode.var[k])
            # Convert to numpy arrays so we can do math with them
            valid_lon = array(valid_lon)
            valid_var = array(valid_var)
            if len(valid_lon) == 0:
                # No valid data; leave data_reg[k,j] as NaN
                pass
            elif len(valid_lon) == 1:
                # Only one valid data point; save to data_reg
                data_reg[k,j] = valid_var[0]
            else:
                # Average over longitude using the trapezoidal rule
                dlon = valid_lon[1:] - valid_lon[0:-1]
                var_centres = 0.5*(valid_var[0:-1] + valid_var[1:])
                # Divide the integral of var_centres*dlon by the integral
                # of dlon to get the average; save to data_reg
                data_reg[k,j] = sum(var_centres*dlon)/sum(dlon)
    # Convert depth back to negative for plotting
    depth_vals = -1*depth_vals

    return lat_vals, depth_vals, data_reg
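# A minimal standalone sketch (not part of the function above) of the
# trapezoidal zonal average applied at each depth level: given values v_i
# at ascending longitudes x_i, the average is
# sum(0.5*(v_i + v_{i+1})*(x_{i+1} - x_i)) / (x_n - x_0),
# which weights each value by the longitude range it represents instead of
# treating unevenly spaced intersection points as equally important. The
# function name and test values are illustrative only.
from numpy import array, sum

def zonal_avg_sketch (lon, var):
    # lon and var are 1D arrays, already sorted by longitude
    dlon = lon[1:] - lon[:-1]
    var_centres = 0.5*(var[:-1] + var[1:])
    return sum(var_centres*dlon)/sum(dlon)

# For example, zonal_avg_sketch(array([0.0, 1.0, 3.0]), array([2.0, 4.0, 4.0]))
# returns (3*1 + 4*2)/3 = 11/3, weighting the wider interval more heavily.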