# Assumed imports, inferred from how the functions below are used. The helper
# modules (cartesian_grid_2d, calc_z, rotate_vector_roms, fesom_grid, patches)
# are separate scripts assumed to live alongside these functions.
from netCDF4 import Dataset, num2date
from numpy import *
import numpy.ma as ma
from matplotlib.pyplot import *
from matplotlib.collections import PatchCollection
from matplotlib.font_manager import FontProperties
from scipy.interpolate import griddata
from os.path import exists
from cartesian_grid_2d import *
from calc_z import *
from rotate_vector_roms import *
from fesom_grid import *
from patches import *


def paul_holland_hack(grid_file):

    total_fw = 1500  # Gt/y
    nbdry = -60  # Apply the freshwater evenly south of here
    sec_per_year = 365.25*24*60*60

    # Read grid and masks, making sure to get rid of the overlapping periodic
    # boundary cells that are double-counted
    id = Dataset(grid_file, 'r')
    lat = id.variables['lat_rho'][:, 1:-1]
    lon = id.variables['lon_rho'][:, 1:-1]
    mask_zice = id.variables['mask_zice'][:, 1:-1]
    mask_rho = id.variables['mask_rho'][:, 1:-1]
    id.close()
    # Mask out land and ice shelves
    mask = mask_rho - mask_zice
    # Get differentials
    dx, dy = cartesian_grid_2d(lon, lat)
    # Open ocean cells
    ocn_flag = mask == 1
    # Cells south of 60S
    loc_flag = lat < nbdry
    # Total area of all open ocean cells
    total_area = sum(dx*dy*ocn_flag)
    print 'Total area = ' + str(total_area) + ' m^2'
    # Total area of open ocean cells south of 60S
    target_area = sum(dx*dy*ocn_flag*loc_flag)
    print 'Area south of 60S = ' + str(target_area) + ' m^2'
    # Multiply by 1e12 to convert from Gt/y to kg/y
    # Divide by sec_per_year to convert from kg/y to kg/s
    # Divide by target area to get kg/m^2/s
    fw_flux = total_fw*1e12/target_area/sec_per_year
    print 'Freshwater flux to add = ' + str(fw_flux) + ' kg/m^2/s'
def calc_grid(file_path):

    # Read grid variables
    id = Dataset(file_path, 'r')
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    lon_u = id.variables['lon_u'][:, :]
    lat_u = id.variables['lat_u'][:, :]
    lon_v = id.variables['lon_v'][:, :]
    lat_v = id.variables['lat_v'][:, :]
    zice = id.variables['zice'][:, :]
    mask_rho = id.variables['mask_rho'][:, :]
    id.close()
    # Calculate dx and dy in another script
    dx, dy = cartesian_grid_2d(lon_u, lat_u, lon_v, lat_v)
    # Calculate dA and mask with zice
    zice_masked = zice*mask_rho
    dA = ma.masked_where(zice_masked == 0, dx*dy)
    # Save dimensions
    #num_lat = size(lon, 0)
    #num_lon = size(lon, 1)
    # Make longitude values go from -180 to 180, not 0 to 360
    #index = lon > 180
    #lon[index] = lon[index] - 360

    return dA, lon, lat
def total_iceshelf_area(roms_grid_file, fesom_mesh_path_lr, fesom_mesh_path_hr):

    id = Dataset(roms_grid_file, 'r')
    lon = id.variables['lon_rho'][:-15, 1:-1]
    lat = id.variables['lat_rho'][:-15, 1:-1]
    zice = id.variables['zice'][:-15, 1:-1]
    id.close()
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = ma.masked_where(zice == 0, dx*dy)
    print 'MetROMS: ' + str(sum(dA)) + ' m^2'

    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True, cross_180=False)
    area_elm_lr = zeros(len(elements_lr))
    for i in range(len(elements_lr)):
        elm = elements_lr[i]
        if elm.cavity:
            area_elm_lr[i] = elm.area()
    print 'FESOM (low-res): ' + str(sum(area_elm_lr)) + ' m^2'

    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True, cross_180=False)
    area_elm_hr = zeros(len(elements_hr))
    for i in range(len(elements_hr)):
        elm = elements_hr[i]
        if elm.cavity:
            area_elm_hr[i] = elm.area()
    print 'FESOM (high-res): ' + str(sum(area_elm_hr)) + ' m^2'
def cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta=None):

    # Calculate 2D dx and dy in another script
    dx, dy = cartesian_grid_2d(lon, lat)
    # Copy into 3D arrays, same at each depth level
    dx = tile(dx, (N, 1, 1))
    dy = tile(dy, (N, 1, 1))
    # Save horizontal dimensions
    num_lat = size(lon, 0)
    num_lon = size(lon, 1)
    # Get a 3D array of z-coordinates; sc_r and Cs_r are unused
    z, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta)
    # We have z at the midpoint of each cell, now find it on the top and
    # bottom edges of each cell
    z_edges = zeros((N+1, num_lat, num_lon))
    z_edges[1:-1, :, :] = 0.5*(z[0:-1, :, :] + z[1:, :, :])
    # At surface, z = zice
    z_edges[-1, :, :] = zice[:, :]
    # Add zeta if it exists
    if zeta is not None:
        z_edges[-1, :, :] += zeta[:, :]
    # At bottom, extrapolate
    z_edges[0, :, :] = 2*z[0, :, :] - z_edges[1, :, :]
    # Now find dz
    dz = z_edges[1:, :, :] - z_edges[0:-1, :, :]

    return dx, dy, dz, z
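# Example (a sketch, not part of the original scripts): volume-average a 3D
# field such as temperature using the differentials returned by
# cartesian_grid_3d. The file name and variable names below are placeholders
# and would need to match the ROMS configuration being analysed; it relies on
# the same imports as the functions above.
def example_volume_average_temp(file_path, theta_s, theta_b, hc, N):

    id = Dataset(file_path, 'r')
    lon = id.variables['lon_rho'][:, :]
    lat = id.variables['lat_rho'][:, :]
    h = id.variables['h'][:, :]
    zice = id.variables['zice'][:, :]
    # Last time index of temperature and free surface
    temp = id.variables['temp'][-1, :, :, :]
    zeta = id.variables['zeta'][-1, :, :]
    id.close()
    # Differentials on the 3D grid
    dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta)
    dV = dx*dy*dz
    # Mask dV wherever temp is masked (land), so the average is over ocean only
    dV = ma.masked_where(ma.getmaskarray(temp), dV)
    # Volume-weighted average
    return sum(temp*dV)/sum(dV)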
def avg_zeta(file_path):

    # Read time and grid variables
    file = Dataset(file_path, 'r')
    time = file.variables['ocean_time'][:]
    # Convert time from seconds to years
    time = time/(365*24*60*60)
    lon = file.variables['lon_rho'][:-15, 1:-1]
    lat = file.variables['lat_rho'][:-15, 1:-1]
    mask = file.variables['mask_rho'][:-15, 1:-1]
    avg_zeta = []
    # Calculate dx and dy in another script
    dx, dy = cartesian_grid_2d(lon, lat)
    # Calculate dA and mask with land mask
    dA = ma.masked_where(mask == 0, dx*dy)

    for l in range(size(time)):
        print 'Processing timestep ' + str(l+1) + ' of ' + str(size(time))
        # Read zeta at this timestep
        zeta = file.variables['zeta'][l, :-15, 1:-1]
        # Calculate area-weighted average
        avg_zeta.append(sum(zeta*dA)/sum(dA))
    file.close()

    # Plot results
    clf()
    plot(time, avg_zeta)
    xlabel('Years')
    ylabel('Average sea surface height (m)')
    show()
def grid_res(grid_path, save=False, fig_name=None):

    # Degrees to radians conversion factor
    deg2rad = pi/180
    # Read grid
    id = Dataset(grid_path, 'r')
    lon = id.variables['lon_rho'][:-15,:-1]
    lat = id.variables['lat_rho'][:-15,:-1]
    mask = id.variables['mask_rho'][:-15,:-1]
    id.close()
    # Get differentials
    dx, dy = cartesian_grid_2d(lon, lat)
    # Calculate resolution: square root of the area, converted to km
    res = sqrt(dx*dy)*1e-3
    # Apply land mask
    res = ma.masked_where(mask==0, res)
    # Polar coordinates for plotting
    x = -(lat+90)*cos(lon*deg2rad+pi/2)
    y = (lat+90)*sin(lon*deg2rad+pi/2)
    # Colour levels
    lev = linspace(0, amax(res), num=50)

    # Plot
    fig = figure(figsize=(16,12))
    fig.add_subplot(1,1,1, aspect='equal')
    contourf(x, y, res, lev, extend='both')
    cbar = colorbar()
    cbar.ax.tick_params(labelsize=20)
    title('Grid resolution (km)', fontsize=30)
    axis('off')

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
def calc_grid(file_path):

    # Read grid variables
    id = Dataset(file_path, 'r')
    lon = id.variables['lon_rho'][:-15, 1:-1]
    lat = id.variables['lat_rho'][:-15, 1:-1]
    zice = id.variables['zice'][:-15, 1:-1]
    id.close()
    # Calculate dx and dy in another script
    dx, dy = cartesian_grid_2d(lon, lat)
    # Calculate dA and mask with zice
    dA = ma.masked_where(zice == 0, dx*dy)
    # Save dimensions
    num_lat = size(lon, 0)
    num_lon = size(lon, 1)
    # Make longitude values go from -180 to 180, not 0 to 360
    index = lon > 180
    lon[index] = lon[index] - 360

    return dA, lon, lat
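# Example (a sketch, not part of the original scripts): area-average a 2D
# field over the ice shelf cavities using the dA returned by the calc_grid
# defined immediately above. The variable name 'm' (ice shelf melt rate) is a
# placeholder and would need to match the output file being read.
def example_avg_ishelf_melt(file_path):

    dA, lon, lat = calc_grid(file_path)
    id = Dataset(file_path, 'r')
    # Last time index of the melt rate field, trimmed the same way as the grid
    m = id.variables['m'][-1, :-15, 1:-1]
    id.close()
    # dA is masked everywhere outside the cavities, so this is a
    # cavity-only area-weighted average
    return sum(m*dA)/sum(dA)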
def mip_grid_res(roms_grid_file, fesom_mesh_low, fesom_mesh_high, save=False, fig_name=None):

    # Spatial bounds on plot
    lat_max = -63 + 90
    # Bounds on colour scale (km)
    limits = [0, 20]
    # Degrees to radians conversion factor
    deg2rad = pi/180
    # FESOM plotting parameters
    circumpolar = True

    print 'Processing ROMS'
    # Read ROMS grid
    id = Dataset(roms_grid_file, 'r')
    roms_lon = id.variables['lon_rho'][:,:]
    roms_lat = id.variables['lat_rho'][:,:]
    roms_mask = id.variables['mask_rho'][:,:]
    id.close()
    # Get differentials
    roms_dx, roms_dy = cartesian_grid_2d(roms_lon, roms_lat)
    # Calculate resolution: square root of the area, converted to km
    roms_res = sqrt(roms_dx*roms_dy)*1e-3
    # Apply land mask
    roms_res = ma.masked_where(roms_mask==0, roms_res)
    # Polar coordinates for plotting
    roms_x = -(roms_lat+90)*cos(roms_lon*deg2rad+pi/2)
    roms_y = (roms_lat+90)*sin(roms_lon*deg2rad+pi/2)

    print 'Processing FESOM low-res'
    # Build triangular patches for each element
    elements_low, patches_low = make_patches(fesom_mesh_low, circumpolar)
    # Calculate the resolution at each element
    fesom_res_low = []
    for elm in elements_low:
        fesom_res_low.append(sqrt(elm.area())*1e-3)

    print 'Processing FESOM high-res'
    # Build triangular patches for each element
    elements_high, patches_high = make_patches(fesom_mesh_high, circumpolar)
    # Calculate the resolution at each element
    fesom_res_high = []
    for elm in elements_high:
        fesom_res_high.append(sqrt(elm.area())*1e-3)

    print 'Plotting'
    fig = figure(figsize=(27,9))
    # ROMS
    ax1 = fig.add_subplot(1,3,1, aspect='equal')
    pcolor(roms_x, roms_y, roms_res, vmin=limits[0], vmax=limits[1], cmap='jet')
    xlim([-lat_max, lat_max])
    ylim([-lat_max, lat_max])
    ax1.set_xticks([])
    ax1.set_yticks([])
    title('a) MetROMS', fontsize=28)
    # FESOM low-res
    ax2 = fig.add_subplot(1,3,2, aspect='equal')
    img_low = PatchCollection(patches_low, cmap='jet')
    img_low.set_array(array(fesom_res_low))
    img_low.set_clim(vmin=limits[0], vmax=limits[1])
    img_low.set_edgecolor('face')
    ax2.add_collection(img_low)
    xlim([-lat_max, lat_max])
    ylim([-lat_max, lat_max])
    ax2.set_xticks([])
    ax2.set_yticks([])
    title('b) FESOM low-res', fontsize=28)
    # FESOM high-res
    ax3 = fig.add_subplot(1,3,3, aspect='equal')
    img_high = PatchCollection(patches_high, cmap='jet')
    img_high.set_array(array(fesom_res_high))
    img_high.set_clim(vmin=limits[0], vmax=limits[1])
    img_high.set_edgecolor('face')
    ax3.add_collection(img_high)
    xlim([-lat_max, lat_max])
    ylim([-lat_max, lat_max])
    ax3.set_xticks([])
    ax3.set_yticks([])
    title('c) FESOM high-res', fontsize=28)
    # Shared colourbar on the right
    cbaxes = fig.add_axes([0.92, 0.2, 0.01, 0.6])
    cbar = colorbar(img_high, cax=cbaxes, extend='max', ticks=arange(limits[0], limits[1]+5, 5))
    cbar.ax.tick_params(labelsize=24)
    suptitle('Horizontal grid resolution (km)', fontsize=36)
    subplots_adjust(wspace=0.05)

    if save:
        fig.savefig(fig_name)
    else:
        fig.show()
def seaice_budget_thermo(cice_file, roms_grid, save=False, fig_names=None):

    # Read bathymetry values for ROMS grid
    id = Dataset(roms_grid, 'r')
    h = id.variables['h'][1:-1, 1:-1]
    id.close()

    # Read CICE grid
    id = Dataset(cice_file, 'r')
    lon = id.variables['TLON'][:, :]
    lat = id.variables['TLAT'][:, :]
    # Calculate elements of area
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = dx*dy
    # Read time values
    time = id.variables['time'][:]/365.25
    # Read all the fields we need
    aice = id.variables['aice'][:, :, :]
    congel = id.variables['congel'][:, :, :]
    frazil = id.variables['frazil'][:, :, :]
    snoice = id.variables['snoice'][:, :, :]
    meltt = -1*id.variables['meltt'][:, :, :]
    meltb = -1*id.variables['meltb'][:, :, :]
    meltl = -1*id.variables['meltl'][:, :, :]
    id.close()

    # Create masks for shelf and offshore region
    shelf = (lat < -60)*(h < 1500)
    offshore = invert(shelf)

    congel_shelf = []
    frazil_shelf = []
    snoice_shelf = []
    meltt_shelf = []
    meltb_shelf = []
    meltl_shelf = []
    congel_offshore = []
    frazil_offshore = []
    snoice_offshore = []
    meltt_offshore = []
    meltb_offshore = []
    meltl_offshore = []

    # Loop over timesteps
    for t in range(size(time)):
        # Only average over regions with at least 10% sea ice
        aice_flag = aice[t, :, :] > 0.1
        # Congelation averaged over the continental shelf
        congel_shelf.append(sum(congel[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Frazil ice formation averaged over the continental shelf
        frazil_shelf.append(sum(frazil[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Snow-to-ice flooding averaged over the continental shelf
        snoice_shelf.append(sum(snoice[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Top melt averaged over the continental shelf
        meltt_shelf.append(sum(meltt[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Basal melt averaged over the continental shelf
        meltb_shelf.append(sum(meltb[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Lateral melt averaged over the continental shelf
        meltl_shelf.append(sum(meltl[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Congelation averaged over the offshore region
        congel_offshore.append(sum(congel[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))
        # Frazil ice formation averaged over the offshore region
        frazil_offshore.append(sum(frazil[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))
        # Snow-to-ice flooding averaged over the offshore region
        snoice_offshore.append(sum(snoice[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))
        # Top melt averaged over the offshore region
        meltt_offshore.append(sum(meltt[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))
        # Basal melt averaged over the offshore region
        meltb_offshore.append(sum(meltb[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))
        # Lateral melt averaged over the offshore region
        meltl_offshore.append(sum(meltl[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))

    # Convert to arrays and sum to get total volume tendency for each region
    congel_shelf = array(congel_shelf)
    frazil_shelf = array(frazil_shelf)
    snoice_shelf = array(snoice_shelf)
    meltt_shelf = array(meltt_shelf)
    meltb_shelf = array(meltb_shelf)
    meltl_shelf = array(meltl_shelf)
    total_shelf = congel_shelf + frazil_shelf + snoice_shelf + meltt_shelf + meltb_shelf + meltl_shelf
    congel_offshore = array(congel_offshore)
    frazil_offshore = array(frazil_offshore)
    snoice_offshore = array(snoice_offshore)
    meltt_offshore = array(meltt_offshore)
    meltb_offshore = array(meltb_offshore)
    meltl_offshore = array(meltl_offshore)
    total_offshore = congel_offshore + frazil_offshore + snoice_offshore + meltt_offshore + meltb_offshore + meltl_offshore

    # Legends need small font to fit
    fontP = FontProperties()
    fontP.set_size('small')

    # Set up continental shelf plot
    fig1, ax1 = subplots(figsize=(8, 6))
    # Add one timeseries at a time
    ax1.plot(time, congel_shelf, label='Congelation', color='blue', linewidth=2)
    ax1.plot(time, frazil_shelf, label='Frazil', color='red', linewidth=2)
    ax1.plot(time, snoice_shelf, label='Snow-to-ice', color='cyan', linewidth=2)
    ax1.plot(time, meltt_shelf, label='Top melt', color='magenta', linewidth=2)
    ax1.plot(time, meltb_shelf, label='Basal melt', color='green', linewidth=2)
    ax1.plot(time, meltl_shelf, label='Lateral melt', color='yellow', linewidth=2)
    ax1.plot(time, total_shelf, label='Total', color='black', linewidth=2)
    # Configure plot
    title('Volume tendency averaged over continental shelf')
    xlabel('Time (years)')
    ylabel('cm/day')
    grid(True)
    # Add a legend
    ax1.legend(loc='upper left', prop=fontP)
    if save:
        fig1.savefig(fig_names[0])
    else:
        fig1.show()

    # Same for offshore plot
    fig2, ax2 = subplots(figsize=(8, 6))
    ax2.plot(time, congel_offshore, label='Congelation', color='blue', linewidth=2)
    ax2.plot(time, frazil_offshore, label='Frazil', color='red', linewidth=2)
    ax2.plot(time, snoice_offshore, label='Snow-to-ice', color='cyan', linewidth=2)
    ax2.plot(time, meltt_offshore, label='Top melt', color='magenta', linewidth=2)
    ax2.plot(time, meltb_offshore, label='Basal melt', color='green', linewidth=2)
    ax2.plot(time, meltl_offshore, label='Lateral melt', color='yellow', linewidth=2)
    ax2.plot(time, total_offshore, label='Total', color='black', linewidth=2)
    title('Volume tendency averaged over offshore region')
    xlabel('Time (years)')
    ylabel('cm/day')
    grid(True)
    ax2.legend(loc='lower right', prop=fontP)
    if save:
        fig2.savefig(fig_names[1])
    else:
        fig2.show()

    # Get cumulative sums of each term (multiplying by 5 converts the
    # averages, assumed to be over 5-day output intervals as elsewhere in
    # these scripts, from cm/day to cm)
    congel_shelf_cum = cumsum(congel_shelf)*5
    frazil_shelf_cum = cumsum(frazil_shelf)*5
    snoice_shelf_cum = cumsum(snoice_shelf)*5
    meltt_shelf_cum = cumsum(meltt_shelf)*5
    meltb_shelf_cum = cumsum(meltb_shelf)*5
    meltl_shelf_cum = cumsum(meltl_shelf)*5
    total_shelf_cum = cumsum(total_shelf)*5
    congel_offshore_cum = cumsum(congel_offshore)*5
    frazil_offshore_cum = cumsum(frazil_offshore)*5
    snoice_offshore_cum = cumsum(snoice_offshore)*5
    meltt_offshore_cum = cumsum(meltt_offshore)*5
    meltb_offshore_cum = cumsum(meltb_offshore)*5
    meltl_offshore_cum = cumsum(meltl_offshore)*5
    total_offshore_cum = cumsum(total_offshore)*5

    # Continental shelf cumulative plot
    fig3, ax3 = subplots(figsize=(8, 6))
    ax3.plot(time, congel_shelf_cum, label='Congelation', color='blue', linewidth=2)
    ax3.plot(time, frazil_shelf_cum, label='Frazil', color='red', linewidth=2)
    ax3.plot(time, snoice_shelf_cum, label='Snow-to-ice', color='cyan', linewidth=2)
    ax3.plot(time, meltt_shelf_cum, label='Top melt', color='magenta', linewidth=2)
    ax3.plot(time, meltb_shelf_cum, label='Basal melt', color='green', linewidth=2)
    ax3.plot(time, meltl_shelf_cum, label='Lateral melt', color='yellow', linewidth=2)
    ax3.plot(time, total_shelf_cum, label='Total', color='black', linewidth=2)
    title('Cumulative volume tendency averaged over continental shelf')
    xlabel('Time (years)')
    ylabel('cm')
    grid(True)
    ax3.legend(loc='lower left', prop=fontP)
    if save:
        fig3.savefig(fig_names[2])
    else:
        fig3.show()

    # Offshore cumulative plot
    fig4, ax4 = subplots(figsize=(8, 6))
    ax4.plot(time, congel_offshore_cum, label='Congelation', color='blue', linewidth=2)
    ax4.plot(time, frazil_offshore_cum, label='Frazil', color='red', linewidth=2)
    ax4.plot(time, snoice_offshore_cum, label='Snow-to-ice', color='cyan', linewidth=2)
    ax4.plot(time, meltt_offshore_cum, label='Top melt', color='magenta', linewidth=2)
    ax4.plot(time, meltb_offshore_cum, label='Basal melt', color='green', linewidth=2)
    ax4.plot(time, meltl_offshore_cum, label='Lateral melt', color='yellow', linewidth=2)
    ax4.plot(time, total_offshore_cum, label='Total', color='black', linewidth=2)
    title('Cumulative volume tendency averaged over offshore region')
    xlabel('Time (years)')
    ylabel('cm')
    grid(True)
    ax4.legend(loc='lower left', prop=fontP)
    if save:
        fig4.savefig(fig_names[3])
    else:
        fig4.show()
def mip_seaice_tamura():

    # File paths
    # ROMS grid (just for bathymetry)
    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
    # FESOM mesh paths
    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
    # CICE 1992-2013 mean ice production (precomputed in calc_ice_prod.py)
    cice_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/ice_prod_1992_2013.nc'
    # FESOM 1992-2013 mean ice production (precomputed in calc_annual_ice_prod.py in fesomtools)
    fesom_lr_file = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/ice_prod_1992_2013.nc'
    fesom_hr_file = '/short/y99/kaa561/FESOM/intercomparison_highres/output/ice_prod_1992_2013.nc'
    # Tamura's 1992-2013 mean ice production (precomputed on desktop with Matlab)
    tamura_file = '/short/m68/kaa561/tamura_1992_2013_monthly_climatology.nc'
    # Output ASCII file
    output_file = 'seaice_prod_bins.log'
    # Size of longitude bin
    dlon_bin = 1.0
    # Definition of continental shelf: everywhere south of lat0 with
    # bathymetry shallower than h0
    lat0 = -60
    h0 = 1500
    # Radius of the Earth in metres
    r = 6.371e6
    # Degrees to radians conversion factor
    deg2rad = pi/180.0

    # Set up longitude bins
    bin_edges = arange(-180, 180+dlon_bin, dlon_bin)
    bin_centres = 0.5*(bin_edges[:-1] + bin_edges[1:])
    num_bins = len(bin_centres)

    print 'Processing MetROMS'
    # Read CICE grid
    id = Dataset(cice_file, 'r')
    cice_lon = id.variables['TLON'][:,:]
    cice_lat = id.variables['TLAT'][:,:]
    # Read sea ice production
    cice_data = id.variables['ice_prod'][:,:]
    id.close()
    # Get area integrands
    dx, dy = cartesian_grid_2d(cice_lon, cice_lat)
    dA = dx*dy
    # Make sure longitude is in the range [-180, 180]
    index = cice_lon > 180
    cice_lon[index] = cice_lon[index] - 360
    # Read bathymetry (ROMS grid file) and trim to CICE grid
    id = Dataset(roms_grid, 'r')
    cice_bathy = id.variables['h'][1:-1,1:-1]
    id.close()
    # Set up integral
    cice_data_bins = zeros(num_bins)
    # Loop over all cells
    num_lon = size(cice_lon,1)
    num_lat = size(cice_lat,0)
    for j in range(num_lat):
        for i in range(num_lon):
            # Check for land mask or ice shelves
            if cice_data[j,i] is ma.masked:
                continue
            # Check for continental shelf
            if cice_lat[j,i] < lat0 and cice_bathy[j,i] < h0:
                # Find the right bin
                bin_index = nonzero(bin_edges > cice_lon[j,i])[0][0] - 1
                # Integrate (m^3/y)
                cice_data_bins[bin_index] += cice_data[j,i]*dA[j,i]
    # Convert to 10^9 m^3/y
    cice_data_bins *= 1e-9

    print 'Processing low-res FESOM'
    # Build mesh
    elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True, cross_180=False)
    # Read sea ice production
    id = Dataset(fesom_lr_file, 'r')
    fesom_data_lr = id.variables['ice_prod'][:]
    id.close()
    # Set up integral
    fesom_data_bins_lr = zeros(num_bins)
    # Loop over elements
    for elm in elements_lr:
        # Exclude ice shelf cavities
        if not elm.cavity:
            # Check for continental shelf in 2 steps
            if all(elm.lat < lat0):
                elm_bathy = mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth])
                if elm_bathy < h0:
                    # Get element-averaged sea ice production
                    elm_data = mean([fesom_data_lr[elm.nodes[0].id], fesom_data_lr[elm.nodes[1].id], fesom_data_lr[elm.nodes[2].id]])
                    # Find the right bin
                    elm_lon = mean(elm.lon)
                    if elm_lon < -180:
                        elm_lon += 360
                    elif elm_lon > 180:
                        elm_lon -= 360
                    bin_index = nonzero(bin_edges > elm_lon)[0][0] - 1
                    # Integrate (m^3/y)
                    fesom_data_bins_lr[bin_index] += elm_data*elm.area()
    # Convert to 10^9 m^3/y
    fesom_data_bins_lr *= 1e-9

    print 'Processing high-res FESOM'
    elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True, cross_180=False)
    id = Dataset(fesom_hr_file, 'r')
    fesom_data_hr = id.variables['ice_prod'][:]
    id.close()
    fesom_data_bins_hr = zeros(num_bins)
    for elm in elements_hr:
        if not elm.cavity:
            if all(elm.lat < lat0):
                elm_bathy = mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth])
                if elm_bathy < h0:
                    elm_data = mean([fesom_data_hr[elm.nodes[0].id], fesom_data_hr[elm.nodes[1].id], fesom_data_hr[elm.nodes[2].id]])
                    elm_lon = mean(elm.lon)
                    if elm_lon < -180:
                        elm_lon += 360
                    elif elm_lon > 180:
                        elm_lon -= 360
                    bin_index = nonzero(bin_edges > elm_lon)[0][0] - 1
                    fesom_data_bins_hr[bin_index] += elm_data*elm.area()
    fesom_data_bins_hr *= 1e-9

    print 'Processing Tamura obs'
    id = Dataset(tamura_file, 'r')
    # Read grid and data
    tamura_lon = id.variables['longitude'][:,:]
    tamura_lat = id.variables['latitude'][:,:]
    # Read sea ice formation
    tamura_data = id.variables['ice_prod'][:,:]
    id.close()
    # Interpolate to a regular grid so we can easily integrate over area
    dlon_reg = 0.2
    dlat_reg = 0.1
    lon_reg_edges = arange(-180, 180+dlon_reg, dlon_reg)
    lon_reg = 0.5*(lon_reg_edges[:-1] + lon_reg_edges[1:])
    lat_reg_edges = arange(-80, -60+dlat_reg, dlat_reg)
    lat_reg = 0.5*(lat_reg_edges[:-1] + lat_reg_edges[1:])
    lon_reg_2d, lat_reg_2d = meshgrid(lon_reg, lat_reg)
    dx_reg = r*cos(lat_reg_2d*deg2rad)*dlon_reg*deg2rad
    dy_reg = r*dlat_reg*deg2rad
    dA_reg = dx_reg*dy_reg
    # Be careful with the periodic boundary here
    num_pts = size(tamura_lon)
    num_wrap1 = count_nonzero(tamura_lon < -179)
    num_wrap2 = count_nonzero(tamura_lon > 179)
    points = empty([num_pts+num_wrap1+num_wrap2,2])
    values = empty(num_pts+num_wrap1+num_wrap2)
    points[:num_pts,0] = ravel(tamura_lon)
    points[:num_pts,1] = ravel(tamura_lat)
    values[:num_pts] = ravel(tamura_data)
    # Wrap the periodic boundary on both sides
    index = tamura_lon < -179
    points[num_pts:num_pts+num_wrap1,0] = tamura_lon[index] + 360
    points[num_pts:num_pts+num_wrap1,1] = tamura_lat[index]
    values[num_pts:num_pts+num_wrap1] = tamura_data[index]
    index = tamura_lon > 179
    points[num_pts+num_wrap1:,0] = tamura_lon[index] - 360
    points[num_pts+num_wrap1:,1] = tamura_lat[index]
    values[num_pts+num_wrap1:] = tamura_data[index]
    values = ma.masked_where(isnan(values), values)
    xi = empty([size(lon_reg_2d),2])
    xi[:,0] = ravel(lon_reg_2d)
    xi[:,1] = ravel(lat_reg_2d)
    result = griddata(points, values, xi)
    tamura_data_reg = reshape(result, shape(lon_reg_2d))
    # Now, regrid the MetROMS bathymetry to this regular grid
    num_pts = size(cice_lon)
    num_wrap1 = count_nonzero(cice_lon < -179)
    num_wrap2 = count_nonzero(cice_lon > 179)
    points = empty([num_pts+num_wrap1+num_wrap2,2])
    values = empty(num_pts+num_wrap1+num_wrap2)
    points[:num_pts,0] = ravel(cice_lon)
    points[:num_pts,1] = ravel(cice_lat)
    values[:num_pts] = ravel(cice_bathy)
    index = cice_lon < -179
    points[num_pts:num_pts+num_wrap1,0] = cice_lon[index] + 360
    points[num_pts:num_pts+num_wrap1,1] = cice_lat[index]
    values[num_pts:num_pts+num_wrap1] = cice_bathy[index]
    index = cice_lon > 179
    points[num_pts+num_wrap1:,0] = cice_lon[index] - 360
    points[num_pts+num_wrap1:,1] = cice_lat[index]
    values[num_pts+num_wrap1:] = cice_bathy[index]
    values = ma.masked_where(isnan(values), values)
    xi = empty([size(lon_reg_2d),2])
    xi[:,0] = ravel(lon_reg_2d)
    xi[:,1] = ravel(lat_reg_2d)
    result = griddata(points, values, xi)
    bathy_reg = reshape(result, shape(lon_reg_2d))
    # Mask everything but the continental shelf from dA_reg
    dA_reg = ma.masked_where(lat_reg_2d > lat0, dA_reg)
    dA_reg = ma.masked_where(bathy_reg > h0, dA_reg)
    # Mask the land mask (and ice shelves) from tamura_data_reg
    tamura_data_reg = ma.masked_where(isnan(tamura_data_reg), tamura_data_reg)
    # Set up integral
    tamura_data_bins = zeros(num_bins)
    # Loop over longitude only
    for i in range(len(lon_reg)):
        # Find the right bin
        bin_index = nonzero(bin_edges > lon_reg[i])[0][0] - 1
        # Integrate (m^3/y)
        tamura_data_bins[bin_index] += sum(tamura_data_reg[:,i]*dA_reg[:,i])
    # Convert to 10^9 m^3/y
    tamura_data_bins *= 1e-9

    # Write data to ASCII file
    print 'Writing to file'
    f = open(output_file, 'w')
    f.write('Longitude:\n')
    for val in bin_centres:
        f.write(str(val) + '\n')
    f.write('MetROMS sea ice production (10^9 m^3/y):\n')
    for val in cice_data_bins:
        f.write(str(val) + '\n')
    f.write('FESOM (low-res) sea ice production (10^9 m^3/y):\n')
    for val in fesom_data_bins_lr:
        f.write(str(val) + '\n')
    f.write('FESOM (high-res) sea ice production (10^9 m^3/y):\n')
    for val in fesom_data_bins_hr:
        f.write(str(val) + '\n')
    f.write('Tamura sea ice production (10^9 m^3/y):\n')
    for val in tamura_data_bins:
        f.write(str(val) + '\n')
    f.close()
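# Example (a sketch, not part of the original scripts): read back the ASCII
# log written by mip_seaice_tamura. Section headers are not parseable as
# floats, so the parser splits on failed float conversions, mirroring the way
# the timeseries_* functions below re-read their own log files.
def example_read_seaice_prod_bins(log_file='seaice_prod_bins.log'):

    sections = []
    current = []
    f = open(log_file, 'r')
    # Skip the first header line ('Longitude:')
    f.readline()
    for line in f:
        try:
            current.append(float(line))
        except(ValueError):
            # Hit the header for the next variable; start a new section
            sections.append(current)
            current = []
    sections.append(current)
    f.close()
    # Returns [longitude, MetROMS, FESOM low-res, FESOM high-res, Tamura]
    return sections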
def timeseries_i2osalt(file_path, log_path):

    # Density of freshwater
    rho_fw = 1000.0
    # Density of seawater
    rho_sw = 1025.0
    # Conversion from m/s to cm/day
    mps_to_cmpday = 8.64e6

    time = []
    avg_ssflux = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            avg_ssflux.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(file_path, 'r')
    lon = id.variables['TLON'][:-15, :]
    lat = id.variables['TLAT'][:-15, :]
    # Calculate area on the tracer grid
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = dx*dy
    # Read time values and convert from days to years
    new_time = id.variables['time'][:]/365.25
    # Concatenate with time values from log file
    for t in range(size(new_time)):
        time.append(new_time[t])

    print 'Reading data'
    # Read freshwater, salt, and rain fluxes (all scaled by aice) and
    # sea surface salinity
    # Throw away northern sponge layer
    fresh_ai = id.variables['fresh_ai'][:, :-15, :]
    sss = id.variables['sss'][:, :-15, :]
    rain_ai = id.variables['rain_ai'][:, :-15, :]
    fsalt_ai = id.variables['fsalt_ai'][:, :-15, :]
    id.close()

    # Build timeseries
    for t in range(size(new_time)):
        # Merge CICE's freshwater and salt fluxes as in set_vbc.F
        # Subtract rain because we don't care about that
        # Convert to kg/m^2/s
        avg_ssflux.append(sum(-1/rho_fw*((fresh_ai[t, :, :] - rain_ai[t, :, :])*sss[t, :, :]*rho_sw/mps_to_cmpday - fsalt_ai[t, :, :]*1e3)*dA)/sum(dA))

    print 'Plotting'
    clf()
    plot(time, avg_ssflux)
    xlabel('Years')
    ylabel(r'Average sea ice to ocean salt flux (kg/m$^2$/s)')
    grid(True)
    savefig('avg_i2osalt.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Average sea ice to ocean salt flux (kg/m^2/s):\n')
    for elm in avg_ssflux:
        f.write(str(elm) + '\n')
    f.close()
def holland_fig1(grid_path, file_path):

    deg2rad = pi/180.0

    # Read grid
    id = Dataset(grid_path, 'r')
    lon = id.variables['lon_rho'][:-15, 1:]
    lat = id.variables['lat_rho'][:-15, 1:]
    h = id.variables['h'][:-15, 1:]
    zice = id.variables['zice'][:-15, 1:]
    angle = id.variables['angle'][:-15, :]
    id.close()

    # Set up figure
    x = -(lat + 90)*cos(lon*deg2rad + pi/2)
    y = (lat + 90)*sin(lon*deg2rad + pi/2)
    fig = figure(figsize=(16, 12))

    # Barotropic streamfunction
    # First read barotropic velocity vector
    id = Dataset(file_path, 'r')
    ubar_xy = mean(id.variables['ubar'][:, :-15, :], axis=0)
    vbar_xy = mean(id.variables['vbar'][:, :-15, :], axis=0)
    id.close()
    # Rotate to lon-lat space
    ubar, vbar = rotate_vector_roms(ubar_xy, vbar_xy, angle)
    # Throw away the overlapping periodic boundary
    ubar = ubar[:, 1:]
    # Mask ice shelves
    ubar = ma.masked_where(zice != 0, ubar)
    # Water column thickness
    wct = h + zice
    # Horizontal differentials
    dx, dy = cartesian_grid_2d(lon, lat)
    # Indefinite integral from south to north of u*dz*dy, convert to Sv
    baro_strf = cumsum(ubar*wct*dy, axis=0)*1e-6
    # Colour levels
    lev1 = arange(-50, 150+10, 10)
    # Plot
    ax1 = fig.add_subplot(2, 2, 1, aspect='equal')
    img = contourf(x, y, baro_strf, lev1, extend='both')
    # Contour 0 Sv in black
    contour(x, y, baro_strf, levels=[0], colors=('black'))
    title('Barotropic streamfunction (Sv)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes1 = fig.add_axes([0.07, 0.6, 0.02, 0.3])
    cbar1 = colorbar(img, ticks=arange(-50, 150+50, 50), cax=cbaxes1)
    cbar1.ax.tick_params(labelsize=16)

    # JJA mixed layer depth
    start_month = 6  # Start in June
    end_month = 8  # End in August
    start_day = 1  # First day in June
    next_startday = 1  # First day in September
    end_day = 31  # Last day in August
    prev_endday = 31  # Last day in May
    ndays_season = 92  # Number of days in June+July+August
    id = Dataset(file_path, 'r')
    # Read time axis and get dates
    time_id = id.variables['ocean_time']
    time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower())
    # Find the last timestep we care about
    end_t = -1  # Missing value flag
    for t in range(size(time)-1, -1, -1):
        if time[t].month == end_month and time[t].day in range(end_day-2, end_day+1):
            end_t = t
            break
        if time[t].month == end_month+1 and time[t].day in range(next_startday, next_startday+2):
            end_t = t
            break
    # Make sure we actually found it
    if end_t == -1:
        print 'Error: ' + file_path + ' does not contain a complete JJA'
        return
    # Find the first timestep we care about
    start_t = -1  # Missing value flag
    for t in range(end_t, -1, -1):
        if time[t].month == start_month-1 and time[t].day in range(prev_endday-1, prev_endday+1):
            start_t = t
            break
        if time[t].month == start_month and time[t].day in range(start_day, start_day+3):
            start_t = t
            break
    # Make sure we found it
    if start_t == -1:
        print 'Error: ' + file_path + ' does not contain a complete JJA'
        return
    # Initialise time-averaged KPP boundary layer depth
    hsbl = ma.empty(shape(lon))
    hsbl[:, :] = 0.0
    ndays = 0
    # Figure out how many of the 5 days represented in start_t we care about
    if time[start_t].month == start_month and time[start_t].day == start_day+2:
        start_days = 5
    elif time[start_t].month == start_month and time[start_t].day == start_day+1:
        start_days = 4
    elif time[start_t].month == start_month and time[start_t].day == start_day:
        start_days = 3
    elif time[start_t].month == start_month-1 and time[start_t].day == prev_endday:
        start_days = 2
    elif time[start_t].month == start_month-1 and time[start_t].day == prev_endday-1:
        start_days = 1
    else:
        print 'Error: starting index is month ' + str(time[start_t].month) + ', day ' + str(time[start_t].day)
        return
    # Integrate Hsbl weighted by start_days
    hsbl += id.variables['Hsbl'][start_t, :-15, 1:]*start_days
    ndays += start_days
    # Between start_t and end_t, we care about all the days
    for t in range(start_t+1, end_t):
        hsbl += id.variables['Hsbl'][t, :-15, 1:]*5
        ndays += 5
    # Figure out how many of the 5 days represented in end_t we care about
    if time[end_t].month == end_month+1 and time[end_t].day == next_startday+1:
        end_days = 1
    elif time[end_t].month == end_month+1 and time[end_t].day == next_startday:
        end_days = 2
    elif time[end_t].month == end_month and time[end_t].day == end_day:
        end_days = 3
    elif time[end_t].month == end_month and time[end_t].day == end_day-1:
        end_days = 4
    elif time[end_t].month == end_month and time[end_t].day == end_day-2:
        end_days = 5
    else:
        print 'Error: ending index is month ' + str(time[end_t].month) + ', day ' + str(time[end_t].day)
        return
    # Integrate weighted by end_days
    hsbl += id.variables['Hsbl'][end_t, :-15, 1:]*end_days
    ndays += end_days
    if ndays != ndays_season:
        print 'Error: found ' + str(ndays) + ' days instead of ' + str(ndays_season)
        return
    id.close()
    # Convert from integral to average
    hsbl[:, :] = hsbl[:, :]/ndays
    # Mask out ice shelves, change sign, and call it mixed layer depth
    mld = ma.masked_where(zice != 0, -hsbl)
    # Colour levels
    lev2 = arange(0, 300+25, 25)
    # Plot
    ax2 = fig.add_subplot(2, 2, 2, aspect='equal')
    img = contourf(x, y, mld, lev2, extend='both')
    # Contour 100 m in black
    contour(x, y, mld, levels=[100], colors=('black'))
    title('Winter mixed layer depth (m)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes2 = fig.add_axes([0.9, 0.6, 0.02, 0.3])
    cbar2 = colorbar(img, ticks=arange(0, 300+100, 100), cax=cbaxes2)
    cbar2.ax.tick_params(labelsize=16)

    # Bottom water temperature
    id = Dataset(file_path, 'r')
    bwtemp = mean(id.variables['temp'][:, 0, :-15, 1:], axis=0)
    id.close()
    # Mask ice shelves
    bwtemp = ma.masked_where(zice != 0, bwtemp)
    # Colour levels
    lev3 = arange(-2, 2+0.2, 0.2)
    # Plot
    ax3 = fig.add_subplot(2, 2, 3, aspect='equal')
    img = contourf(x, y, bwtemp, lev3, extend='both')
    # Contour 0C in black
    contour(x, y, bwtemp, levels=[0], colors=('black'))
    title(r'Bottom temperature ($^{\circ}$C)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes3 = fig.add_axes([0.07, 0.1, 0.02, 0.3])
    cbar3 = colorbar(img, ticks=arange(-2, 2+1, 1), cax=cbaxes3)
    cbar3.ax.tick_params(labelsize=16)

    # Bottom water salinity
    id = Dataset(file_path, 'r')
    bwsalt = mean(id.variables['salt'][:, 0, :-15, 1:], axis=0)
    bwsalt = ma.masked_where(zice != 0, bwsalt)
    id.close()
    lev4 = arange(34.5, 34.8+0.025, 0.025)
    ax4 = fig.add_subplot(2, 2, 4, aspect='equal')
    img = contourf(x, y, bwsalt, lev4, extend='both')
    # Contour 34.65 psu in black
    contour(x, y, bwsalt, levels=[34.65], colors=('black'))
    title('Bottom salinity (psu)', fontsize=24)
    xlim([-35, 39])
    ylim([-35, 39])
    axis('off')
    cbaxes4 = fig.add_axes([0.9, 0.1, 0.02, 0.3])
    cbar4 = colorbar(img, ticks=arange(34.5, 34.8+0.1, 0.1), cax=cbaxes4)
    cbar4.ax.tick_params(labelsize=16)

    fig.show()
def timeseries_sss(file_path, log_path):

    time = []
    avg_sss = []
    avg_ssflux = []
    avg_restore = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                avg_sss.append(float(line))
            except(ValueError):
                break
        for line in f:
            try:
                avg_ssflux.append(float(line))
            except(ValueError):
                break
        for line in f:
            avg_restore.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(file_path, 'r')
    lon = id.variables['lon_rho'][:-15,1:-1]
    lat = id.variables['lat_rho'][:-15,1:-1]
    zice = id.variables['zice'][:-15,1:-1]
    # Calculate area on the tracer grid and mask ice shelves
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = ma.masked_where(zice!=0, dx*dy)
    # Read time values and convert from seconds to years
    new_time = id.variables['ocean_time'][:]/(365.25*24*60*60)
    # Concatenate with time values from log file
    for t in range(size(new_time)):
        time.append(new_time[t])

    print 'Reading data'
    # Read surface salinity, salt flux, and restoring flux
    # Throw away overlapping periodic boundary and northern sponge layer
    sss = id.variables['salt'][:,-1,:-15,1:-1]
    ssflux = id.variables['ssflux'][:,:-15,1:-1]
    ssflux_restoring = id.variables['ssflux_restoring'][:,:-15,1:-1]
    id.close()

    # Build timeseries
    for t in range(size(new_time)):
        avg_sss.append(sum(sss[t,:,:]*dA)/sum(dA))
        avg_ssflux.append(sum(ssflux[t,:,:]*dA)/sum(dA))
        avg_restore.append(sum(ssflux_restoring[t,:,:]*dA)/sum(dA))

    print 'Plotting'
    clf()
    plot(time, avg_sss)
    xlabel('Years')
    ylabel('Average sea surface salinity (psu)')
    grid(True)
    savefig('avg_sss.png')

    clf()
    plot(time, avg_ssflux)
    xlabel('Years')
    ylabel(r'Average surface salt flux (kg/m$^2$/s)')
    grid(True)
    savefig('avg_ssflux.png')

    clf()
    plot(time, avg_restore)
    xlabel('Years')
    ylabel(r'Average surface salt flux from salinity restoring (kg/m$^2$/s)')
    grid(True)
    savefig('avg_restore.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Average sea surface salinity (psu):\n')
    for elm in avg_sss:
        f.write(str(elm) + '\n')
    f.write('Average surface salt flux (kg/m^2/s):\n')
    for elm in avg_ssflux:
        f.write(str(elm) + '\n')
    f.write('Average surface salt flux from salinity restoring (kg/m^2/s):\n')
    for elm in avg_restore:
        f.write(str(elm) + '\n')
    f.close()
def timeseries_seaice_extent(file_path, log_path):

    time = []
    extent = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            extent.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(file_path, 'r')
    lon = id.variables['TLON'][:-15,:]
    lat = id.variables['TLAT'][:-15,:]
    # Calculate area on the tracer grid
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = dx*dy
    # Read time values and convert from days to years
    new_time = id.variables['time'][:]/365.25
    # Concatenate with time values from log file
    for t in range(size(new_time)):
        time.append(new_time[t])

    print 'Reading data'
    # Read sea ice concentration
    # Throw away northern sponge layer
    aice = id.variables['aice'][:,:-15,:]
    id.close()
    # Select cells with concentration >= 15%
    flag = aice >= 0.15

    print 'Building timeseries'
    for t in range(size(new_time)):
        # Integrate extent and convert to million km^2
        extent.append(sum(flag[t,:,:]*dA)*1e-12)

    print 'Plotting'
    clf()
    plot(time, extent)
    xlabel('Years')
    ylabel(r'Sea Ice Extent (million km$^2$)')
    grid(True)
    savefig('seaice_extent.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Sea Ice Extent (million km^2):\n')
    for elm in extent:
        f.write(str(elm) + '\n')
    f.close()
def seaice_budget(cice_file, roms_grid, save=False, fig_names=None):

    # Read bathymetry values for ROMS grid
    id = Dataset(roms_grid, 'r')
    h = id.variables['h'][1:-1, 1:-1]
    id.close()

    # Read CICE grid
    id = Dataset(cice_file, 'r')
    lon = id.variables['TLON'][:, :]
    lat = id.variables['TLAT'][:, :]
    # Calculate elements of area
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = dx*dy
    # Read time values
    time = id.variables['time'][:]/365.25
    # Read data (concentration and thermodynamic/dynamic volume tendencies)
    aice = id.variables['aice'][:, :, :]
    dvidtt = id.variables['dvidtt'][:, :, :]
    dvidtd = id.variables['dvidtd'][:, :, :]
    id.close()

    # Create masks for shelf and offshore region
    shelf = (lat < -60)*(h < 1500)
    offshore = invert(shelf)

    dvidtt_shelf = []
    dvidtd_shelf = []
    dvidtt_offshore = []
    dvidtd_offshore = []
    # Loop over timesteps
    for t in range(size(time)):
        # Only average over regions with at least 10% sea ice
        aice_flag = aice[t, :, :] > 0.1
        # Thermodynamic volume tendency averaged over the continental shelf
        dvidtt_shelf.append(sum(dvidtt[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Dynamic volume tendency averaged over the continental shelf
        dvidtd_shelf.append(sum(dvidtd[t, :, :]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag))
        # Thermodynamic volume tendency averaged over the offshore region
        dvidtt_offshore.append(sum(dvidtt[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))
        # Dynamic volume tendency averaged over the offshore region
        dvidtd_offshore.append(sum(dvidtd[t, :, :]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag))

    # Convert to arrays and sum to get total volume tendencies for each region
    dvidtt_shelf = array(dvidtt_shelf)
    dvidtd_shelf = array(dvidtd_shelf)
    dvi_shelf = dvidtt_shelf + dvidtd_shelf
    dvidtt_offshore = array(dvidtt_offshore)
    dvidtd_offshore = array(dvidtd_offshore)
    dvi_offshore = dvidtt_offshore + dvidtd_offshore

    # Set up continental shelf plot
    fig1, ax1 = subplots(figsize=(8, 6))
    # Add one timeseries at a time
    ax1.plot(time, dvidtt_shelf, label='Thermodynamics', color='blue', linewidth=2)
    ax1.plot(time, dvidtd_shelf, label='Dynamics', color='green', linewidth=2)
    ax1.plot(time, dvi_shelf, label='Total', color='black', linewidth=2)
    # Configure plot
    title('Volume tendency averaged over continental shelf')
    xlabel('Time (years)')
    ylabel('cm/day')
    grid(True)
    # Add a legend
    ax1.legend(loc='upper left')
    if save:
        fig1.savefig(fig_names[0])
    else:
        fig1.show()

    # Same for offshore plot
    fig2, ax2 = subplots(figsize=(8, 6))
    ax2.plot(time, dvidtt_offshore, label='Thermodynamics', color='blue', linewidth=2)
    ax2.plot(time, dvidtd_offshore, label='Dynamics', color='green', linewidth=2)
    ax2.plot(time, dvi_offshore, label='Total', color='black', linewidth=2)
    title('Volume tendency averaged over offshore region')
    xlabel('Time (years)')
    ylabel('cm/day')
    grid(True)
    ax2.legend(loc='lower right')
    if save:
        fig2.savefig(fig_names[1])
    else:
        fig2.show()

    # Get cumulative sums of each term
    dvidtt_shelf_cum = cumsum(dvidtt_shelf)*5
    dvidtd_shelf_cum = cumsum(dvidtd_shelf)*5
    dvi_shelf_cum = cumsum(dvi_shelf)*5
    dvidtt_offshore_cum = cumsum(dvidtt_offshore)*5
    dvidtd_offshore_cum = cumsum(dvidtd_offshore)*5
    dvi_offshore_cum = cumsum(dvi_offshore)*5

    # Continental shelf cumulative plot
    fig3, ax3 = subplots(figsize=(8, 6))
    ax3.plot(time, dvidtt_shelf_cum, label='Thermodynamics', color='blue', linewidth=2)
    ax3.plot(time, dvidtd_shelf_cum, label='Dynamics', color='green', linewidth=2)
    ax3.plot(time, dvi_shelf_cum, label='Total', color='black', linewidth=2)
    title('Cumulative volume tendency averaged over continental shelf')
    xlabel('Time (years)')
    ylabel('cm')
    grid(True)
    ax3.legend(loc='upper left')
    if save:
        fig3.savefig(fig_names[2])
    else:
        fig3.show()

    # Offshore cumulative plot
    fig4, ax4 = subplots(figsize=(8, 6))
    ax4.plot(time, dvidtt_offshore_cum, label='Thermodynamics', color='blue', linewidth=2)
    ax4.plot(time, dvidtd_offshore_cum, label='Dynamics', color='green', linewidth=2)
    ax4.plot(time, dvi_offshore_cum, label='Total', color='black', linewidth=2)
    title('Cumulative volume tendency averaged over offshore region')
    xlabel('Time (years)')
    ylabel('cm')
    grid(True)
    ax4.legend(loc='upper right')
    if save:
        fig4.savefig(fig_names[3])
    else:
        fig4.show()
def timeseries_seaice(file_path, log_path, add_years=0):

    time = []
    total_area = []
    total_volume = []
    # Check if the log file exists
    if exists(log_path):
        print 'Reading previously calculated values'
        f = open(log_path, 'r')
        # Skip first line (header for time array)
        f.readline()
        for line in f:
            try:
                time.append(float(line))
            except(ValueError):
                # Reached the header for the next variable
                break
        for line in f:
            try:
                total_area.append(float(line))
            except(ValueError):
                break
        for line in f:
            total_volume.append(float(line))
        f.close()

    print 'Analysing grid'
    id = Dataset(file_path, 'r')
    lon = id.variables['TLON'][:-15, :]
    lat = id.variables['TLAT'][:-15, :]
    # Calculate area on the tracer grid
    dx, dy = cartesian_grid_2d(lon, lat)
    dA = dx*dy
    # Read time values and convert from days to years
    new_time = id.variables['time'][:]/365.25 + add_years
    # Concatenate with time values from log file
    for t in range(size(new_time)):
        time.append(new_time[t])

    print 'Reading data'
    # Read sea ice concentration and height
    # Throw away northern sponge layer
    aice = id.variables['aice'][:, :-15, :]
    hi = id.variables['hi'][:, :-15, :]
    id.close()

    print 'Setting up arrays'
    # Remove masks and fill with zeros (was having weird masking issues here)
    aice_nomask = aice.data
    aice_nomask[aice.mask] = 0.0
    hi_nomask = hi.data
    hi_nomask[hi.mask] = 0.0

    # Build timeseries
    for t in range(size(new_time)):
        # Integrate area and convert to million km^2
        total_area.append(sum(aice_nomask[t, :, :]*dA)*1e-12)
        # Integrate volume and convert to thousand km^3
        total_volume.append(sum(aice_nomask[t, :, :]*hi_nomask[t, :, :]*dA)*1e-12)

    print 'Plotting total sea ice area'
    clf()
    plot(time, total_area)
    xlabel('Years')
    ylabel(r'Total Sea Ice Area (million km$^2$)')
    grid(True)
    savefig('seaice_area.png')

    print 'Plotting total sea ice volume'
    clf()
    plot(time, total_volume)
    xlabel('Years')
    ylabel(r'Total Sea Ice Volume (thousand km$^3$)')
    grid(True)
    savefig('seaice_volume.png')

    print 'Saving results to log file'
    f = open(log_path, 'w')
    f.write('Time (years):\n')
    for elm in time:
        f.write(str(elm) + '\n')
    f.write('Total Sea Ice Area (million km^2):\n')
    for elm in total_area:
        f.write(str(elm) + '\n')
    f.write('Total Sea Ice Volume (thousand km^3):\n')
    for elm in total_volume:
        f.write(str(elm) + '\n')
    f.close()
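# Example driver (a sketch, not part of the original scripts): the functions
# above are typically run interactively on a CICE history file; this shows one
# plausible way to call the sea ice timeseries routines. The prompt text and
# log file names are placeholders.
if __name__ == '__main__':
    cice_history = raw_input('Path to CICE history file: ')
    timeseries_seaice(cice_history, 'seaice.log')
    timeseries_seaice_extent(cice_history, 'seaice_extent.log')
    timeseries_i2osalt(cice_history, 'i2osalt.log')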