    def __init__(self):
        from utils import log_netCDF_dataset_metadata

        logger.info('MeanDynamicTopographyDataReader initializing. Loading MDT dataset: {}'
                    .format(self.MDT_file_path))
        self.MDT_dataset = netCDF4.Dataset(self.MDT_file_path)
        logger.info('Successfully loaded MDT dataset: {}'.format(self.MDT_file_path))
        log_netCDF_dataset_metadata(self.MDT_dataset)

        self.lats = np.array(self.MDT_dataset.variables['lat'])
        self.lons = np.array(self.MDT_dataset.variables['lon'])

        self.mdt = np.array(self.MDT_dataset.variables['mdt'][0])
        self.u_geo = np.array(self.MDT_dataset.variables['u'][0])
        self.v_geo = np.array(self.MDT_dataset.variables['v'][0])

        self.latgrid_interp = None
        self.longrid_interp = None

        # Not even using the MDT dataset anymore.
        self.mdt_interp = None
        # self.interpolate_mdt_field()

        self.ugeo_interp = None
        self.vgeo_interp = None
        self.interpolate_u_geo_field()
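    # --- Illustrative sketch (added for exposition; not part of the original class) ---
    # The 'u' and 'v' variables loaded above are the geostrophic velocities implied by the
    # mean dynamic topography eta through geostrophic balance:
    #     u_geo = -(g / f) * d(eta)/dy,    v_geo = (g / f) * d(eta)/dx.
    # A minimal finite-difference version of that relation, assuming a regular lat/lon grid
    # and ignoring masking near the equator where f -> 0:
    @staticmethod
    def _example_u_geo_from_mdt(mdt, lats, lons):
        g = 9.81             # gravitational acceleration [m/s^2]
        Omega = 7.292115e-5  # Earth's rotation rate [rad/s]
        R = 6371e3           # mean Earth radius [m]

        lat_grid = np.deg2rad(lats)[:, None] * np.ones((1, len(lons)))
        f = 2 * Omega * np.sin(lat_grid)  # Coriolis parameter [1/s]

        dy = R * np.deg2rad(np.gradient(lats))[:, None]                      # meridional spacing [m]
        dx = R * np.cos(lat_grid) * np.deg2rad(np.gradient(lons))[None, :]   # zonal spacing [m]

        u_geo = -(g / f) * np.gradient(mdt, axis=0) / dy
        v_geo = (g / f) * np.gradient(mdt, axis=1) / dx
        return u_geo, v_geo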
    def load_surface_wind_dataset(self):
        from utils import log_netCDF_dataset_metadata

        uwind_dataset_filepath, vwind_dataset_filepath = self.date_to_dataset_filepath(self.date)

        logger.info('Loading NCEP u_wind dataset: {}'.format(uwind_dataset_filepath))
        self.u_wind_dataset = netCDF4.Dataset(uwind_dataset_filepath)
        logger.info('Successfully loaded NCEP u_wind dataset: {}'.format(uwind_dataset_filepath))
        log_netCDF_dataset_metadata(self.u_wind_dataset)

        logger.info('Loading NCEP v_wind dataset: {}'.format(vwind_dataset_filepath))
        self.v_wind_dataset = netCDF4.Dataset(vwind_dataset_filepath)
        logger.info('Successfully loaded NCEP v_wind dataset: {}'.format(vwind_dataset_filepath))
        log_netCDF_dataset_metadata(self.v_wind_dataset)

        self.lats = np.array(self.u_wind_dataset.variables['lat'])
        self.lons = np.array(self.u_wind_dataset.variables['lon'])

        # Pad the longitude axis with 360 E so the zonal direction is periodic.
        self.lons = np.append(self.lons, 360.0)

        # Time indexing starts at 0, so subtract 1 from the day of year to get the right index.
        self.u_wind = np.array(self.u_wind_dataset.variables['uwnd'][self.day_of_year - 1])
        self.v_wind = np.array(self.v_wind_dataset.variables['vwnd'][self.day_of_year - 1])

        # Duplicate the 0 E data column at 360 E to match the padded longitude axis.
        self.u_wind = np.c_[self.u_wind, self.u_wind[:, 0]]
        self.v_wind = np.c_[self.v_wind, self.v_wind[:, 0]]
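    # --- Illustrative sketch (added for exposition; not part of the original class) ---
    # The padding above appends a 360 E longitude and copies the 0 E data column there, so
    # interpolation near the dateline sees a zonally periodic field. The same idiom in
    # isolation, for any (lat, lon) field on a grid that stops short of 360 E:
    @staticmethod
    def _example_wrap_longitude(field, lons):
        lons_wrapped = np.append(lons, 360.0)       # e.g. 0, 2.5, ..., 357.5 -> ..., 360
        field_wrapped = np.c_[field, field[:, 0]]   # copy the 0 E column to 360 E
        return field_wrapped, lons_wrapped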
    def load_u_geo_dataset(self):
        from utils import log_netCDF_dataset_metadata

        dataset_filepath = self.date_to_u_geo_dataset_filepath(self.date)

        logger.info('Loading geostrophic velocity dataset: {}'.format(dataset_filepath))
        self.u_geo_dataset = netCDF4.Dataset(dataset_filepath)
        logger.info('Successfully loaded geostrophic velocity dataset: {}'.format(dataset_filepath))
        log_netCDF_dataset_metadata(self.u_geo_dataset)

        self.lats = np.array(self.u_geo_dataset.variables['latitude'])
        self.lons = np.array(self.u_geo_dataset.variables['longitude'])

        self.u_geo = np.array(self.u_geo_dataset.variables['ugos'][0])
        self.v_geo = np.array(self.u_geo_dataset.variables['vgos'][0])
    def load_temperature_dataset(self):
        from utils import log_netCDF_dataset_metadata

        dataset_filepath = self.temperature_dataset_filepath()

        logger.info('Loading temperature dataset: {}'.format(dataset_filepath))
        self.temperature_dataset = netCDF4.Dataset(dataset_filepath)
        logger.info('Successfully loaded temperature dataset: {}'.format(dataset_filepath))
        log_netCDF_dataset_metadata(self.temperature_dataset)

        self.lats = np.array(self.temperature_dataset.variables['lat'])
        self.lons = np.array(self.temperature_dataset.variables['lon'])
        self.depths = np.array(self.temperature_dataset.variables['depth'])

        field_var = 't_' + self.field_type
        self.temperature_data = np.array(self.temperature_dataset.variables[field_var])
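    # Note on the variable name built above: WOA-style files key their fields as
    # '<tracer>_<field_type>', e.g. 't_an' for the objectively analyzed mean temperature,
    # and the salinity loader follows the same pattern with an 's_' prefix.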
    def load_alpha_dataset(self):
        from utils import log_netCDF_dataset_metadata

        dataset_filepath = self.date_to_alpha_dataset_filepath(self.date)

        logger.info('Loading sea ice concentration dataset: {}'.format(dataset_filepath))
        self.alpha_dataset = netCDF4.Dataset(dataset_filepath)
        logger.info('Successfully loaded sea ice concentration dataset: {}'.format(dataset_filepath))
        log_netCDF_dataset_metadata(self.alpha_dataset)

        self.lats = np.array(self.alpha_dataset.variables['latitude'])
        self.lons = np.array(self.alpha_dataset.variables['longitude'])
        self.xgrid = np.array(self.alpha_dataset.variables['xgrid'])
        self.ygrid = np.array(self.alpha_dataset.variables['ygrid'])
        self.alpha = np.array(self.alpha_dataset.variables['goddard_nt_seaice_conc'][0])
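    # --- Illustrative sketch (added for exposition; not part of the original class) ---
    # On this polar stereographic product, 'latitude'/'longitude' are assumed to be 2D
    # arrays aligned with (ygrid, xgrid). A minimal nearest-neighbour lookup of the
    # concentration at a point, using a crude equirectangular metric with longitude
    # wrapping (hypothetical helper, shown only to illustrate the idea):
    @staticmethod
    def _example_alpha_at(alpha, lats2d, lons2d, lat, lon):
        dlon = np.mod(lons2d - lon + 180.0, 360.0) - 180.0  # wrap to [-180, 180)
        dist2 = (lats2d - lat)**2 + dlon**2
        i, j = np.unravel_index(np.argmin(dist2), dist2.shape)
        return alpha[i, j]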
    def __init__(self, time_span, avg_period, grid_size, field_type, depth_levels):
        """
        :param time_span: Choose from '5564', '6574', '7584', '8594', '95A4', 'A5B2', 'decav', and 'all'.
        :param avg_period: Choose from annual ('00'), monthly ('01'-'12'), and seasonal ('13' for JFM,
                           '14' for AMJ, '15' for JAS, and '16' for OND).
        :param grid_size: Choose from '04', '01', and '5d'.
        :param field_type: Choose from 'an', 'mn', 'dd', 'ma', 'sd', 'se', 'oa', and 'gp'.
        :param depth_levels: List of depth level indices at which to load or compute the field.
        """
        self.neutral_density_dataset = None

        self.time_span = time_span
        self.avg_period = avg_period
        self.grid_size = grid_size
        self.field_type = field_type
        self.depth_levels = depth_levels

        self.grid_size_dir = None
        if grid_size == '04':
            self.grid_size_dir = '0.25'
        elif grid_size == '01':
            self.grid_size_dir = '1.00'
        elif grid_size == '5d':
            self.grid_size_dir = '5deg'

        self.lats = None
        self.lons = None

        logger.info('NeutralDensityDataset object initializing for time span {} and averaging period {}...'
                    .format(self.time_span, self.avg_period))

        self.lats = np.linspace(lat_min, lat_max, n_lat)
        self.lons = np.linspace(lon_min, lon_max, n_lon)

        self.neutral_density_field = np.zeros((len(self.depth_levels), len(self.lats), len(self.lons)))
        self.salinity_field = np.zeros((len(self.depth_levels), len(self.lats), len(self.lons)))
        self.temperature_field = np.zeros((len(self.depth_levels), len(self.lats), len(self.lons)))

        self.salinity_dataset = SalinityDataset(time_span, avg_period, grid_size, field_type)
        self.temperature_dataset = TemperatureDataset(time_span, avg_period, grid_size, field_type)

        # If the dataset already exists and is stored, load it up.
        for i in range(len(self.depth_levels)):
            neutral_density_dataset_filepath = self.neutral_density_dataset_filepath(self.depth_levels[i])
            try:
                self.neutral_density_dataset = netCDF4.Dataset(neutral_density_dataset_filepath)
                log_netCDF_dataset_metadata(self.neutral_density_dataset)
                self.salinity_field[i] = np.array(self.neutral_density_dataset.variables['salinity'])
                self.temperature_field[i] = np.array(self.neutral_density_dataset.variables['temperature'])
                self.neutral_density_field[i] = np.array(self.neutral_density_dataset.variables['neutral_density'])
            except Exception as e:
                logger.error('{}'.format(e))
                logger.warning('{:s} not found. Neutral density field will now be computed...'
                               .format(neutral_density_dataset_filepath))

        return

        # Fallback path (currently unreachable because of the early return above):
        # compute the neutral density field from the salinity and temperature datasets and save it.
        self.neutral_density_dataset = None
        self.salinity_field = np.zeros((len(self.lats), len(self.lons)))
        self.temperature_field = np.zeros((len(self.lats), len(self.lons)))
        self.neutral_density_field = np.zeros((len(self.lats), len(self.lons)))

        self.calculate_neutral_density_field()
        self.save_neutral_density_dataset()
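    # Illustrative usage (hypothetical parameter values, chosen from the options listed in
    # the constructor docstring above):
    #
    #     gamma_n = NeutralDensityDataset(time_span='A5B2', avg_period='00', grid_size='04',
    #                                     field_type='an', depth_levels=[0, 10, 20])
    #
    # Note that with the early return in __init__, fields are only ever loaded from
    # previously saved netCDF files; the compute-and-save path is currently disabled.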
def retroactively_compute_melting_freezing_rate():
    from constants import D_e, kappa

    # import constants
    # constants.output_dir_path = 'D:\\output\\'

    start_date = datetime.date(2011, 1, 1)
    end_date = datetime.date(2016, 12, 31)
    dates = date_range(start_date, end_date)

    # Alternative: average over the winter (JAS) months only.
    # dates = []
    # for year in range(2011, 2016 + 1):
    #     start_date = datetime.date(year, 7, 1)
    #     end_date = datetime.date(year, 9, 30)
    #     dates = dates + date_range(start_date, end_date)

    h_ice_dataset = SeaIceThicknessDataset(start_date)

    # Load mixed layer salinity maps from Pellichero et al. (2017, 2018).
    gamma_filepath = os.path.join(data_dir_path, 'Climatology_MLD_v2017.nc')
    logger.info('Loading gamma dataset: {}'.format(gamma_filepath))
    gamma_dataset = netCDF4.Dataset(gamma_filepath)

    lats_ml = np.array(gamma_dataset.variables['lat'])
    lons_ml = np.array(gamma_dataset.variables['lon'])
    mixed_layer_data = np.array(gamma_dataset.variables['ML_SA'])

    lats_ml_max = len(lats_ml) - 1
    lons_ml_max = len(lons_ml) - 1

    salinity_monthly_climo = [None] * 12
    for month in range(12):
        salinity_monthly_climo[month] = mixed_layer_data[month]

    try:
        tau_filepath = get_netCDF_filepath(field_type='daily', date=start_date)
        tau_dataset = netCDF4.Dataset(tau_filepath)
        lats = np.array(tau_dataset.variables['lat'])
        lons = np.array(tau_dataset.variables['lon'])
    except OSError as e:
        logger.error('{}'.format(e))
        logger.error('{:s} not found. Could not load lat, lon arrays.'.format(tau_filepath))
        return  # Cannot proceed without the coordinate grid.

    alpha_avg_field = np.zeros((len(lats), len(lons)))
    alpha_day_field = np.zeros((len(lats), len(lons)))
    u_ice_avg_field = np.zeros((len(lats), len(lons)))
    u_ice_day_field = np.zeros((len(lats), len(lons)))
    v_ice_avg_field = np.zeros((len(lats), len(lons)))
    v_ice_day_field = np.zeros((len(lats), len(lons)))
    h_ice_avg_field = np.zeros((len(lats), len(lons)))
    h_ice_day_field = np.zeros((len(lats), len(lons)))
    zonal_div_avg_field = np.zeros((len(lats), len(lons)))
    zonal_div_day_field = np.zeros((len(lats), len(lons)))
    merid_div_avg_field = np.zeros((len(lats), len(lons)))
    merid_div_day_field = np.zeros((len(lats), len(lons)))
    div_avg_field = np.zeros((len(lats), len(lons)))
    div_day_field = np.zeros((len(lats), len(lons)))
    Ekman_term_avg_field = np.zeros((len(lats), len(lons)))
    Ekman_term_day_field = np.zeros((len(lats), len(lons)))
    geo_term_avg_field = np.zeros((len(lats), len(lons)))
    geo_term_day_field = np.zeros((len(lats), len(lons)))
    diffusion_term_avg_field = np.zeros((len(lats), len(lons)))
    diffusion_term_day_field = np.zeros((len(lats), len(lons)))
    salinity_avg_field = np.zeros((len(lats), len(lons)))
    salinity_day_field = np.zeros((len(lats), len(lons)))

    for date in dates:
        tau_filepath = get_netCDF_filepath(field_type='daily', date=date)
        logger.info('Averaging {:%b %d, %Y} ({:s})...'.format(date, tau_filepath))

        try:
            current_tau_dataset = netCDF4.Dataset(tau_filepath)
            log_netCDF_dataset_metadata(current_tau_dataset)
        except OSError as e:
            logger.error('{}'.format(e))
            logger.warning('{:s} not found. Proceeding without it...'.format(tau_filepath))
            continue

        alpha_daily_field = np.array(current_tau_dataset.variables['alpha'])
        u_ice_daily_field = np.array(current_tau_dataset.variables['ice_u'])
        v_ice_daily_field = np.array(current_tau_dataset.variables['ice_v'])
        u_geo_daily_field = np.array(current_tau_dataset.variables['geo_u'])
        v_geo_daily_field = np.array(current_tau_dataset.variables['geo_v'])

        # U_Ekman_daily_field = np.array(current_tau_dataset.variables['Ekman_U'])
        # V_Ekman_daily_field = np.array(current_tau_dataset.variables['Ekman_V'])
        U_Ekman_daily_field = D_e * np.array(current_tau_dataset.variables['Ekman_u'])
        V_Ekman_daily_field = D_e * np.array(current_tau_dataset.variables['Ekman_v'])

        # Use the monthly mixed layer salinity climatology rather than the daily salinity field.
        # salinity_daily_field = np.array(current_tau_dataset.variables['salinity'])
        salinity_daily_field = salinity_monthly_climo[date.month - 1]

        # logger.info('salinity_daily_field: min = {:}, max = {:}, mean = {:}'
        #             .format(np.nanmin(salinity_daily_field), np.nanmax(salinity_daily_field),
        #                     np.nanmean(salinity_daily_field)))

        h_ice_daily_field = np.zeros((len(lats), len(lons)))
        zonal_div_daily_field = np.zeros((len(lats), len(lons)))
        merid_div_daily_field = np.zeros((len(lats), len(lons)))
        div_daily_field = np.zeros((len(lats), len(lons)))
        Ekman_term_daily_field = np.zeros((len(lats), len(lons)))
        geo_term_daily_field = np.zeros((len(lats), len(lons)))
        diffusion_term_daily_field = np.zeros((len(lats), len(lons)))
        salinity_interp_daily_field = np.zeros((len(lats), len(lons)))
        salinity_interp_daily_field[:] = np.nan

        # Load h_ice field for the day (i.e. the correct seasonal field).
        for i in range(len(lats)):
            for j in range(len(lons)):
                h_ice_daily_field[i][j] = h_ice_dataset.sea_ice_thickness(i, j, date)

        i_max = len(lats) - 1
        j_max = len(lons) - 1

        for i in range(1, len(lats) - 1):
            lat = lats[i]

            # progress_percent = 100 * i / (len(lats) - 2)
            # logger.info('({:}, ice_div) lat = {:.2f}/{:.2f} ({:.1f}%)'.format(date, lat, -40, progress_percent))

            dx = distance(lats[i - 1], lons[0], lats[i + 1], lons[0])
            dy = distance(lats[i], lons[0], lats[i], lons[2])

            for j in range(len(lons)):
                lon = lons[j]

                if lon < 0:
                    lon_ml = lon + 360
                else:
                    lon_ml = lon

                # Taking modulus of j-1 and j+1 to get the correct index in the special cases of
                #   * j=0 (180 W) and need to use the value from j=j_max (180 E)
                #   * j=j_max (180 E) and need to use the value from j=0 (180 W)
                jm1 = (j - 1) % j_max
                jp1 = (j + 1) % j_max

                u_ice_i_jp1 = u_ice_daily_field[i][jp1]
                u_ice_i_jm1 = u_ice_daily_field[i][jm1]
                v_ice_ip1_j = v_ice_daily_field[i + 1][j]
                v_ice_im1_j = v_ice_daily_field[i - 1][j]

                alpha_i_jp1 = alpha_daily_field[i][jp1]
                alpha_i_jm1 = alpha_daily_field[i][jm1]
                alpha_ip1_j = alpha_daily_field[i + 1][j]
                alpha_im1_j = alpha_daily_field[i - 1][j]

                h_ice_i_jp1 = h_ice_daily_field[i][jp1]
                h_ice_i_jm1 = h_ice_daily_field[i][jm1]
                h_ice_ip1_j = h_ice_daily_field[i + 1][j]
                h_ice_im1_j = h_ice_daily_field[i - 1][j]

                # Sample the mixed layer salinity climatology at its nearest grid point.
                idx_lat = np.abs(lats_ml - lat).argmin()
                idx_lon = np.abs(lons_ml - lon_ml).argmin()
                idx_lonp1 = (idx_lon + 1) % lons_ml_max
                idx_lonm1 = (idx_lon - 1) % lons_ml_max

                S_ij = salinity_daily_field[idx_lat][idx_lon]
                S_i_jp1 = salinity_daily_field[idx_lat][idx_lonp1]
                S_i_jm1 = salinity_daily_field[idx_lat][idx_lonm1]
                S_ip1_j = salinity_daily_field[idx_lat + 1][idx_lon]
                S_im1_j = salinity_daily_field[idx_lat - 1][idx_lon]

                salinity_interp_daily_field[i][j] = S_ij

                # S_ij = salinity_daily_field[i][j]
                # S_i_jp1 = salinity_daily_field[i][jp1]
                # S_i_jm1 = salinity_daily_field[i][jm1]
                # S_ip1_j = salinity_daily_field[i+1][j]
                # S_im1_j = salinity_daily_field[i-1][j]
                u_geo_ij = u_geo_daily_field[i][j]
                v_geo_ij = v_geo_daily_field[i][j]
                U_Ekman_ij = U_Ekman_daily_field[i][j]
                V_Ekman_ij = V_Ekman_daily_field[i][j]

                if not np.isnan(u_ice_i_jm1) and not np.isnan(u_ice_i_jp1):
                    div_x = (alpha_i_jp1 * h_ice_i_jp1 * u_ice_i_jp1
                             - alpha_i_jm1 * h_ice_i_jm1 * u_ice_i_jm1) / dx
                    zonal_div_daily_field[i][j] = div_x
                else:
                    zonal_div_daily_field[i][j] = np.nan

                if not np.isnan(v_ice_im1_j) and not np.isnan(v_ice_ip1_j):
                    div_y = (alpha_ip1_j * h_ice_ip1_j * v_ice_ip1_j
                             - alpha_im1_j * h_ice_im1_j * v_ice_im1_j) / dy
                    merid_div_daily_field[i][j] = div_y
                else:
                    merid_div_daily_field[i][j] = np.nan

                if not np.isnan(zonal_div_daily_field[i][j]) and not np.isnan(merid_div_daily_field[i][j]):
                    div_daily_field[i][j] = div_x + div_y
                else:
                    div_daily_field[i][j] = np.nan

                if not np.isnan(S_ij) and not np.isnan(S_i_jm1) and not np.isnan(S_i_jp1) \
                        and not np.isnan(S_ip1_j) and not np.isnan(S_im1_j):
                    dSdx = (S_i_jp1 - S_i_jm1) / dx
                    dSdy = (S_ip1_j - S_im1_j) / dy
                    del2_S = ((S_i_jp1 - 2 * S_ij + S_i_jm1) / dx**2) \
                        + ((S_ip1_j - 2 * S_ij + S_im1_j) / dy**2)

                    Ekman_term_daily_field[i][j] = (U_Ekman_ij * dSdx + V_Ekman_ij * dSdy) / S_ij
                    geo_term_daily_field[i][j] = (D_e / S_ij) * (u_geo_ij * dSdx + v_geo_ij * dSdy)
                    diffusion_term_daily_field[i][j] = (kappa * D_e / S_ij) * del2_S
                else:
                    Ekman_term_daily_field[i][j] = np.nan
                    geo_term_daily_field[i][j] = np.nan
                    diffusion_term_daily_field[i][j] = np.nan

                # Mask out cells with missing data or open ocean (alpha < 0.15).
                if np.isnan(alpha_daily_field[i][j]) or alpha_daily_field[i][j] < 0.15:
                    Ekman_term_daily_field[i][j] = np.nan
                    geo_term_daily_field[i][j] = np.nan
                    diffusion_term_daily_field[i][j] = np.nan

        # Accumulate each daily field into a running sum (with NaNs zeroed out) while
        # counting, per cell, the number of days with data, so dividing at the end
        # yields a NaN-aware time mean.
        alpha_avg_field = alpha_avg_field + np.nan_to_num(alpha_daily_field)
        alpha_daily_field[~np.isnan(alpha_daily_field)] = 1
        alpha_daily_field[np.isnan(alpha_daily_field)] = 0
        alpha_day_field = alpha_day_field + alpha_daily_field

        u_ice_avg_field = u_ice_avg_field + np.nan_to_num(u_ice_daily_field)
        u_ice_daily_field[~np.isnan(u_ice_daily_field)] = 1
        u_ice_daily_field[np.isnan(u_ice_daily_field)] = 0
        u_ice_day_field = u_ice_day_field + u_ice_daily_field

        v_ice_avg_field = v_ice_avg_field + np.nan_to_num(v_ice_daily_field)
        v_ice_daily_field[~np.isnan(v_ice_daily_field)] = 1
        v_ice_daily_field[np.isnan(v_ice_daily_field)] = 0
        v_ice_day_field = v_ice_day_field + v_ice_daily_field

        h_ice_avg_field = h_ice_avg_field + np.nan_to_num(h_ice_daily_field)
        h_ice_daily_field[~np.isnan(h_ice_daily_field)] = 1
        h_ice_daily_field[np.isnan(h_ice_daily_field)] = 0
        h_ice_day_field = h_ice_day_field + h_ice_daily_field

        zonal_div_avg_field = zonal_div_avg_field + np.nan_to_num(zonal_div_daily_field)
        zonal_div_daily_field[~np.isnan(zonal_div_daily_field)] = 1
        zonal_div_daily_field[np.isnan(zonal_div_daily_field)] = 0
        zonal_div_day_field = zonal_div_day_field + zonal_div_daily_field

        merid_div_avg_field = merid_div_avg_field + np.nan_to_num(merid_div_daily_field)
        merid_div_daily_field[~np.isnan(merid_div_daily_field)] = 1
        merid_div_daily_field[np.isnan(merid_div_daily_field)] = 0
        merid_div_day_field = merid_div_day_field + merid_div_daily_field

        div_avg_field = div_avg_field + np.nan_to_num(div_daily_field)
        div_daily_field[~np.isnan(div_daily_field)] = 1
        div_daily_field[np.isnan(div_daily_field)] = 0
        div_day_field = div_day_field + div_daily_field

        Ekman_term_avg_field = Ekman_term_avg_field + np.nan_to_num(Ekman_term_daily_field)
        Ekman_term_daily_field[~np.isnan(Ekman_term_daily_field)] = 1
        Ekman_term_daily_field[np.isnan(Ekman_term_daily_field)] = 0
        Ekman_term_day_field = Ekman_term_day_field + Ekman_term_daily_field
        geo_term_avg_field = geo_term_avg_field + np.nan_to_num(geo_term_daily_field)
        geo_term_daily_field[~np.isnan(geo_term_daily_field)] = 1
        geo_term_daily_field[np.isnan(geo_term_daily_field)] = 0
        geo_term_day_field = geo_term_day_field + geo_term_daily_field

        diffusion_term_avg_field = diffusion_term_avg_field + np.nan_to_num(diffusion_term_daily_field)
        diffusion_term_daily_field[~np.isnan(diffusion_term_daily_field)] = 1
        diffusion_term_daily_field[np.isnan(diffusion_term_daily_field)] = 0
        diffusion_term_day_field = diffusion_term_day_field + diffusion_term_daily_field

        # logger.info('salinity_interp_daily_field: min = {:}, max = {:}, mean = {:}'
        #             .format(np.nanmin(salinity_interp_daily_field), np.nanmax(salinity_interp_daily_field),
        #                     np.nanmean(salinity_interp_daily_field)))

        salinity_avg_field = salinity_avg_field + np.nan_to_num(salinity_interp_daily_field)
        salinity_interp_daily_field[~np.isnan(salinity_interp_daily_field)] = 1
        salinity_interp_daily_field[np.isnan(salinity_interp_daily_field)] = 0
        salinity_day_field = salinity_day_field + salinity_interp_daily_field

        # logger.info('salinity_avg_field: min = {:}, max = {:}, mean = {:}'
        #             .format(np.nanmin(salinity_avg_field), np.nanmax(salinity_avg_field),
        #                     np.nanmean(salinity_avg_field)))
        # logger.info('salinity_day_field: min = {:}, max = {:}, mean = {:}'
        #             .format(np.nanmin(salinity_day_field), np.nanmax(salinity_day_field),
        #                     np.nanmean(salinity_day_field)))

    # Divide the running sums by the per-cell day counts to get time means, converting the
    # divergence and salinity budget terms from per second to per year.
    alpha_avg_field = np.divide(alpha_avg_field, alpha_day_field)
    u_ice_avg_field = np.divide(u_ice_avg_field, u_ice_day_field)
    v_ice_avg_field = np.divide(v_ice_avg_field, v_ice_day_field)
    h_ice_avg_field = np.divide(h_ice_avg_field, h_ice_day_field)
    zonal_div_avg_field = np.divide(zonal_div_avg_field, zonal_div_day_field)
    merid_div_avg_field = np.divide(merid_div_avg_field, merid_div_day_field)
    div_avg_field = 3600 * 24 * 365 * np.divide(div_avg_field, div_day_field)
    Ekman_term_avg_field = 3600 * 24 * 365 * np.divide(Ekman_term_avg_field, Ekman_term_day_field)
    geo_term_avg_field = 3600 * 24 * 365 * np.divide(geo_term_avg_field, geo_term_day_field)
    diffusion_term_avg_field = 3600 * 24 * 365 * np.divide(diffusion_term_avg_field, diffusion_term_day_field)
    salinity_avg_field = np.divide(salinity_avg_field, salinity_day_field)

    logger.info('salinity_avg_field: min = {:}, max = {:}, mean = {:}'.format(
        np.nanmin(salinity_avg_field), np.nanmax(salinity_avg_field), np.nanmean(salinity_avg_field)))
    logger.info('salinity_day_field: min = {:}, max = {:}, mean = {:}'.format(
        np.nanmin(salinity_day_field), np.nanmax(salinity_day_field), np.nanmean(salinity_day_field)))

    nc_dir = os.path.dirname(output_dir_path)
    nc_filepath = os.path.join(nc_dir, 'melting_freezing_rate_{:}_{:}.nc'.format(start_date, end_date))

    if not os.path.exists(nc_dir):
        logger.info('Creating directory: {:s}'.format(nc_dir))
        os.makedirs(nc_dir)

    logger.info('Saving fields to netCDF file: {:s}'.format(nc_filepath))

    tau_dataset = netCDF4.Dataset(nc_filepath, 'w')
    tau_dataset.title = 'Melting and freezing rates in the Antarctic sea ice zone'
    tau_dataset.institution = 'Department of Earth, Atmospheric, and Planetary Science, ' \
                              'Massachusetts Institute of Technology'

    tau_dataset.createDimension('time', None)
    tau_dataset.createDimension('lat', len(lats))
    tau_dataset.createDimension('lon', len(lons))

    # TODO: Actually store a date.
    time_var = tau_dataset.createVariable('time', np.float64, ('time',))
    time_var.units = 'hours since 0001-01-01 00:00:00'
    time_var.calendar = 'gregorian'

    lat_var = tau_dataset.createVariable('lat', np.float32, ('lat',))
    lat_var.units = 'degrees south'
    lat_var[:] = lats

    lon_var = tau_dataset.createVariable('lon', np.float32, ('lon',))
    lon_var.units = 'degrees west/east'
    lon_var[:] = lons

    var_fields = {
        'alpha': alpha_avg_field,
        'u_ice': u_ice_avg_field,
        'v_ice': v_ice_avg_field,
        'h_ice': h_ice_avg_field,
        'zonal_div': zonal_div_avg_field,
        'merid_div': merid_div_avg_field,
        'div': div_avg_field,
        'Ekman_term': Ekman_term_avg_field,
        'geo_term': geo_term_avg_field,
        'diffusion_term': diffusion_term_avg_field,
        'salinity': salinity_avg_field
    }

    for var_name in var_fields.keys():
        field_var = tau_dataset.createVariable(var_name, float, ('lat', 'lon'), zlib=True)
        field_var[:] = var_fields[var_name]

    tau_dataset.close()
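# --- Illustrative sketch (added for exposition; not called by the code above) ---
# Both averaging routines in this module repeat the same accumulation idiom: add each
# daily field into a running sum with NaNs zeroed out, and separately count, per grid
# cell, how many days actually had data, so that sum / count is a NaN-aware time mean.
# The idiom in isolation, without mutating the daily field in place:
def _example_nan_aware_accumulate(avg_field, day_field, daily_field):
    avg_field = avg_field + np.nan_to_num(daily_field)              # NaNs contribute zero
    day_field = day_field + (~np.isnan(daily_field)).astype(float)  # count valid days
    return avg_field, day_field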
def retroactively_compute_sea_ice_advection():
    from constants import C_fw  # freshwater conversion factor used for 'div' below

    start_date = datetime.date(2005, 1, 1)
    end_date = datetime.date(2005, 12, 31)
    dates = date_range(start_date, end_date)

    h_ice_dataset = SeaIceThicknessDataset(start_date)

    import constants
    constants.output_dir_path = 'D:\\output\\'

    try:
        tau_filepath = get_netCDF_filepath(field_type='daily', date=start_date)
        tau_dataset = netCDF4.Dataset(tau_filepath)
        lats = np.array(tau_dataset.variables['lat'])
        lons = np.array(tau_dataset.variables['lon'])
    except OSError as e:
        logger.error('{}'.format(e))
        logger.error('{:s} not found. Could not load lat, lon arrays.'.format(tau_filepath))
        return  # Cannot proceed without the coordinate grid.

    alpha_avg_field = np.zeros((len(lats), len(lons)))
    alpha_day_field = np.zeros((len(lats), len(lons)))
    u_ice_avg_field = np.zeros((len(lats), len(lons)))
    u_ice_day_field = np.zeros((len(lats), len(lons)))
    v_ice_avg_field = np.zeros((len(lats), len(lons)))
    v_ice_day_field = np.zeros((len(lats), len(lons)))
    h_ice_avg_field = np.zeros((len(lats), len(lons)))
    h_ice_day_field = np.zeros((len(lats), len(lons)))
    zonal_div_avg_field = np.zeros((len(lats), len(lons)))
    zonal_div_day_field = np.zeros((len(lats), len(lons)))
    merid_div_avg_field = np.zeros((len(lats), len(lons)))
    merid_div_day_field = np.zeros((len(lats), len(lons)))
    div_avg_field = np.zeros((len(lats), len(lons)))
    div_day_field = np.zeros((len(lats), len(lons)))

    hu_dadx_avg_field = np.zeros((len(lats), len(lons)))
    au_dhdx_avg_field = np.zeros((len(lats), len(lons)))
    ah_dudx_avg_field = np.zeros((len(lats), len(lons)))
    hv_dady_avg_field = np.zeros((len(lats), len(lons)))
    av_dhdy_avg_field = np.zeros((len(lats), len(lons)))
    ah_dvdy_avg_field = np.zeros((len(lats), len(lons)))
    hu_dadx_day_field = np.zeros((len(lats), len(lons)))
    au_dhdx_day_field = np.zeros((len(lats), len(lons)))
    ah_dudx_day_field = np.zeros((len(lats), len(lons)))
    hv_dady_day_field = np.zeros((len(lats), len(lons)))
    av_dhdy_day_field = np.zeros((len(lats), len(lons)))
    ah_dvdy_day_field = np.zeros((len(lats), len(lons)))

    div2_avg_field = np.zeros((len(lats), len(lons)))
    div2_day_field = np.zeros((len(lats), len(lons)))

    for date in dates:
        tau_filepath = get_netCDF_filepath(field_type='daily', date=date)
        logger.info('Averaging {:%b %d, %Y} ({:s})...'.format(date, tau_filepath))

        try:
            current_tau_dataset = netCDF4.Dataset(tau_filepath)
            log_netCDF_dataset_metadata(current_tau_dataset)
        except OSError as e:
            logger.error('{}'.format(e))
            logger.warning('{:s} not found. Proceeding without it...'.format(tau_filepath))
            continue

        alpha_daily_field = np.array(current_tau_dataset.variables['alpha'])
        u_ice_daily_field = np.array(current_tau_dataset.variables['ice_u'])
        v_ice_daily_field = np.array(current_tau_dataset.variables['ice_v'])

        h_ice_daily_field = np.zeros((len(lats), len(lons)))
        zonal_div_daily_field = np.zeros((len(lats), len(lons)))
        merid_div_daily_field = np.zeros((len(lats), len(lons)))
        div_daily_field = np.zeros((len(lats), len(lons)))
        hu_dadx_daily_field = np.zeros((len(lats), len(lons)))
        au_dhdx_daily_field = np.zeros((len(lats), len(lons)))
        ah_dudx_daily_field = np.zeros((len(lats), len(lons)))
        hv_dady_daily_field = np.zeros((len(lats), len(lons)))
        av_dhdy_daily_field = np.zeros((len(lats), len(lons)))
        ah_dvdy_daily_field = np.zeros((len(lats), len(lons)))
        div2_daily_field = np.zeros((len(lats), len(lons)))

        # Load h_ice field for the day (i.e. the correct seasonal field).
        for i in range(len(lats)):
            for j in range(len(lons)):
                h_ice_daily_field[i][j] = h_ice_dataset.sea_ice_thickness(i, j, date)

        # import astropy.convolution
        # kernel = astropy.convolution.Box2DKernel(10)
        # alpha_daily_field = astropy.convolution.convolve(alpha_daily_field, kernel, boundary='wrap')
        # h_ice_daily_field = astropy.convolution.convolve(h_ice_daily_field, kernel, boundary='wrap')
        # u_ice_daily_field = astropy.convolution.convolve(u_ice_daily_field, kernel, boundary='wrap')
        # v_ice_daily_field = astropy.convolution.convolve(v_ice_daily_field, kernel, boundary='wrap')

        i_max = len(lats) - 1
        j_max = len(lons) - 1

        for i in range(1, len(lats) - 1):
            # lat = lats[i]
            # progress_percent = 100 * i / (len(lats) - 2)
            # logger.info('({:}, ice_div) lat = {:.2f}/{:.2f} ({:.1f}%)'.format(date, lat, -40, progress_percent))

            dx = distance(lats[i - 1], lons[0], lats[i + 1], lons[0])
            dy = distance(lats[i], lons[0], lats[i], lons[2])

            for j in range(1, len(lons) - 1):
                # Taking modulus of j-1 and j+1 to get the correct index in the special cases of
                #   * j=0 (180 W) and need to use the value from j=j_max (180 E)
                #   * j=j_max (180 E) and need to use the value from j=0 (180 W)
                jm1 = (j - 1) % j_max
                jp1 = (j + 1) % j_max

                u_ice_ij = u_ice_daily_field[i][j]
                u_ice_i_jp1 = u_ice_daily_field[i][jp1]
                u_ice_i_jm1 = u_ice_daily_field[i][jm1]
                u_ice_ip1_j = u_ice_daily_field[i + 1][j]
                u_ice_im1_j = u_ice_daily_field[i - 1][j]

                v_ice_ij = v_ice_daily_field[i][j]
                v_ice_ip1_j = v_ice_daily_field[i + 1][j]
                v_ice_im1_j = v_ice_daily_field[i - 1][j]
                v_ice_i_jp1 = v_ice_daily_field[i][jp1]
                v_ice_i_jm1 = v_ice_daily_field[i][jm1]

                alpha_ij = alpha_daily_field[i][j]
                alpha_i_jp1 = alpha_daily_field[i][jp1]
                alpha_i_jm1 = alpha_daily_field[i][jm1]
                alpha_ip1_j = alpha_daily_field[i + 1][j]
                alpha_im1_j = alpha_daily_field[i - 1][j]

                h_ice_ij = h_ice_daily_field[i][j]
                h_ice_i_jp1 = h_ice_daily_field[i][jp1]
                h_ice_i_jm1 = h_ice_daily_field[i][jm1]
                h_ice_ip1_j = h_ice_daily_field[i + 1][j]
                h_ice_im1_j = h_ice_daily_field[i - 1][j]

                if not np.isnan(u_ice_i_jm1) and not np.isnan(u_ice_i_jp1):
                    div_x = (alpha_i_jp1 * h_ice_i_jp1 * u_ice_i_jp1
                             - alpha_i_jm1 * h_ice_i_jm1 * u_ice_i_jm1) / dx
                    zonal_div_daily_field[i][j] = div_x
                else:
                    zonal_div_daily_field[i][j] = np.nan

                if not np.isnan(v_ice_im1_j) and not np.isnan(v_ice_ip1_j):
                    div_y = (alpha_ip1_j * h_ice_ip1_j * v_ice_ip1_j
                             - alpha_im1_j * h_ice_im1_j * v_ice_im1_j) / dy
                    merid_div_daily_field[i][j] = div_y
                else:
                    merid_div_daily_field[i][j] = np.nan

                if not np.isnan(zonal_div_daily_field[i][j]) and not np.isnan(merid_div_daily_field[i][j]):
                    div_daily_field[i][j] = C_fw * (div_x + div_y)
                else:
                    div_daily_field[i][j] = np.nan

                # Product-rule decomposition of the flux divergence: the six terms below
                # together expand d(a*h*u)/dx + d(a*h*v)/dy.
                if not np.isnan(alpha_i_jm1) and not np.isnan(alpha_i_jp1):
                    hu_dadx_daily_field[i][j] = h_ice_ij * u_ice_ij * (alpha_i_jp1 - alpha_i_jm1) / dx
                else:
                    hu_dadx_daily_field[i][j] = np.nan

                if not np.isnan(h_ice_i_jm1) and not np.isnan(h_ice_i_jp1):
                    au_dhdx_daily_field[i][j] = alpha_ij * u_ice_ij * (h_ice_i_jp1 - h_ice_i_jm1) / dx
                else:
                    au_dhdx_daily_field[i][j] = np.nan

                if not np.isnan(u_ice_i_jm1) and not np.isnan(u_ice_i_jp1):
                    ah_dudx_daily_field[i][j] = alpha_ij * h_ice_ij * (u_ice_i_jp1 - u_ice_i_jm1) / dx
                else:
                    ah_dudx_daily_field[i][j] = np.nan

                if not np.isnan(alpha_im1_j) and not np.isnan(alpha_ip1_j):
                    hv_dady_daily_field[i][j] = h_ice_ij * v_ice_ij * (alpha_ip1_j - alpha_im1_j) / dy
                else:
                    hv_dady_daily_field[i][j] = np.nan

                if not np.isnan(h_ice_im1_j) and not np.isnan(h_ice_ip1_j):
                    av_dhdy_daily_field[i][j] = alpha_ij * v_ice_ij * (h_ice_ip1_j - h_ice_im1_j) / dy
                else:
                    av_dhdy_daily_field[i][j] = np.nan

                if not np.isnan(v_ice_im1_j) and not np.isnan(v_ice_ip1_j):
                    ah_dvdy_daily_field[i][j] = alpha_ij * h_ice_ij * (v_ice_ip1_j - v_ice_im1_j) / dy
                else:
                    ah_dvdy_daily_field[i][j] = np.nan

                div2_daily_field[i][j] = hu_dadx_daily_field[i][j] + au_dhdx_daily_field[i][j] \
                    + ah_dudx_daily_field[i][j] + hv_dady_daily_field[i][j] \
                    + av_dhdy_daily_field[i][j] + ah_dvdy_daily_field[i][j]

        # import astropy.convolution
        # kernel = astropy.convolution.Box2DKernel(10)
        # div_daily_field = astropy.convolution.convolve(div_daily_field, kernel, boundary='wrap')

        alpha_avg_field = alpha_avg_field + np.nan_to_num(alpha_daily_field)
        alpha_daily_field[~np.isnan(alpha_daily_field)] = 1
        alpha_daily_field[np.isnan(alpha_daily_field)] = 0
        alpha_day_field = alpha_day_field + alpha_daily_field

        u_ice_avg_field = u_ice_avg_field + np.nan_to_num(u_ice_daily_field)
        u_ice_daily_field[~np.isnan(u_ice_daily_field)] = 1
        u_ice_daily_field[np.isnan(u_ice_daily_field)] = 0
        u_ice_day_field = u_ice_day_field + u_ice_daily_field

        v_ice_avg_field = v_ice_avg_field + np.nan_to_num(v_ice_daily_field)
        v_ice_daily_field[~np.isnan(v_ice_daily_field)] = 1
        v_ice_daily_field[np.isnan(v_ice_daily_field)] = 0
        v_ice_day_field = v_ice_day_field + v_ice_daily_field

        h_ice_avg_field = h_ice_avg_field + np.nan_to_num(h_ice_daily_field)
        h_ice_daily_field[~np.isnan(h_ice_daily_field)] = 1
        h_ice_daily_field[np.isnan(h_ice_daily_field)] = 0
        h_ice_day_field = h_ice_day_field + h_ice_daily_field

        zonal_div_avg_field = zonal_div_avg_field + np.nan_to_num(zonal_div_daily_field)
        zonal_div_daily_field[~np.isnan(zonal_div_daily_field)] = 1
        zonal_div_daily_field[np.isnan(zonal_div_daily_field)] = 0
        zonal_div_day_field = zonal_div_day_field + zonal_div_daily_field

        merid_div_avg_field = merid_div_avg_field + np.nan_to_num(merid_div_daily_field)
        merid_div_daily_field[~np.isnan(merid_div_daily_field)] = 1
        merid_div_daily_field[np.isnan(merid_div_daily_field)] = 0
        merid_div_day_field = merid_div_day_field + merid_div_daily_field

        div_avg_field = div_avg_field + np.nan_to_num(div_daily_field)
        div_daily_field[~np.isnan(div_daily_field)] = 1
        div_daily_field[np.isnan(div_daily_field)] = 0
        div_day_field = div_day_field + div_daily_field

        hu_dadx_avg_field = hu_dadx_avg_field + np.nan_to_num(hu_dadx_daily_field)
        hu_dadx_daily_field[~np.isnan(hu_dadx_daily_field)] = 1
        hu_dadx_daily_field[np.isnan(hu_dadx_daily_field)] = 0
        hu_dadx_day_field = hu_dadx_day_field + hu_dadx_daily_field

        au_dhdx_avg_field = au_dhdx_avg_field + np.nan_to_num(au_dhdx_daily_field)
        au_dhdx_daily_field[~np.isnan(au_dhdx_daily_field)] = 1
        au_dhdx_daily_field[np.isnan(au_dhdx_daily_field)] = 0
        au_dhdx_day_field = au_dhdx_day_field + au_dhdx_daily_field

        ah_dudx_avg_field = ah_dudx_avg_field + np.nan_to_num(ah_dudx_daily_field)
        ah_dudx_daily_field[~np.isnan(ah_dudx_daily_field)] = 1
        ah_dudx_daily_field[np.isnan(ah_dudx_daily_field)] = 0
        ah_dudx_day_field = ah_dudx_day_field + ah_dudx_daily_field

        hv_dady_avg_field = hv_dady_avg_field + np.nan_to_num(hv_dady_daily_field)
        hv_dady_daily_field[~np.isnan(hv_dady_daily_field)] = 1
        hv_dady_daily_field[np.isnan(hv_dady_daily_field)] = 0
        hv_dady_day_field = hv_dady_day_field + hv_dady_daily_field

        av_dhdy_avg_field = av_dhdy_avg_field + np.nan_to_num(av_dhdy_daily_field)
        av_dhdy_daily_field[~np.isnan(av_dhdy_daily_field)] = 1
        av_dhdy_daily_field[np.isnan(av_dhdy_daily_field)] = 0
        av_dhdy_day_field = av_dhdy_day_field + av_dhdy_daily_field

        ah_dvdy_avg_field = ah_dvdy_avg_field + np.nan_to_num(ah_dvdy_daily_field)
        ah_dvdy_daily_field[~np.isnan(ah_dvdy_daily_field)] = 1
        ah_dvdy_daily_field[np.isnan(ah_dvdy_daily_field)] = 0
        ah_dvdy_day_field = ah_dvdy_day_field + ah_dvdy_daily_field

        div2_avg_field = div2_avg_field + np.nan_to_num(div2_daily_field)
        div2_daily_field[~np.isnan(div2_daily_field)] = 1
        div2_daily_field[np.isnan(div2_daily_field)] = 0
        div2_day_field = div2_day_field + div2_daily_field

    # Divide the running sums by the per-cell day counts to get time means, converting the
    # divergence terms from per second to per year.
    alpha_avg_field = np.divide(alpha_avg_field, alpha_day_field)
    u_ice_avg_field = np.divide(u_ice_avg_field, u_ice_day_field)
    v_ice_avg_field = np.divide(v_ice_avg_field, v_ice_day_field)
    h_ice_avg_field = np.divide(h_ice_avg_field, h_ice_day_field)
    zonal_div_avg_field = np.divide(zonal_div_avg_field, zonal_div_day_field)
    merid_div_avg_field = np.divide(merid_div_avg_field, merid_div_day_field)
    div_avg_field = 3600 * 24 * 365 * np.divide(div_avg_field, div_day_field)

    hu_dadx_avg_field = 3600 * 24 * 365 * np.divide(hu_dadx_avg_field, hu_dadx_day_field)
    au_dhdx_avg_field = 3600 * 24 * 365 * np.divide(au_dhdx_avg_field, au_dhdx_day_field)
    ah_dudx_avg_field = 3600 * 24 * 365 * np.divide(ah_dudx_avg_field, ah_dudx_day_field)
    hv_dady_avg_field = 3600 * 24 * 365 * np.divide(hv_dady_avg_field, hv_dady_day_field)
    av_dhdy_avg_field = 3600 * 24 * 365 * np.divide(av_dhdy_avg_field, av_dhdy_day_field)
    ah_dvdy_avg_field = 3600 * 24 * 365 * np.divide(ah_dvdy_avg_field, ah_dvdy_day_field)
    div2_avg_field = 3600 * 24 * 365 * np.divide(div2_avg_field, div2_day_field)

    nc_dir = os.path.dirname(output_dir_path)
    nc_filepath = os.path.join(nc_dir, 'ice_flux_div_{:}_{:}.nc'.format(start_date, end_date))

    if not os.path.exists(nc_dir):
        logger.info('Creating directory: {:s}'.format(nc_dir))
        os.makedirs(nc_dir)

    logger.info('Saving fields to netCDF file: {:s}'.format(nc_filepath))

    tau_dataset = netCDF4.Dataset(nc_filepath, 'w')
    tau_dataset.title = 'Ice flux divergence in the Antarctic sea ice zone'
    tau_dataset.institution = 'Department of Earth, Atmospheric, and Planetary Science, ' \
                              'Massachusetts Institute of Technology'

    tau_dataset.createDimension('time', None)
    tau_dataset.createDimension('lat', len(lats))
    tau_dataset.createDimension('lon', len(lons))

    # TODO: Actually store a date.
    time_var = tau_dataset.createVariable('time', np.float64, ('time',))
    time_var.units = 'hours since 0001-01-01 00:00:00'
    time_var.calendar = 'gregorian'

    lat_var = tau_dataset.createVariable('lat', np.float32, ('lat',))
    lat_var.units = 'degrees south'
    lat_var[:] = lats

    lon_var = tau_dataset.createVariable('lon', np.float32, ('lon',))
    lon_var.units = 'degrees west/east'
    lon_var[:] = lons

    var_fields = {
        'alpha': alpha_avg_field,
        'u_ice': u_ice_avg_field,
        'v_ice': v_ice_avg_field,
        'h_ice': h_ice_avg_field,
        'zonal_div': zonal_div_avg_field,
        'merid_div': merid_div_avg_field,
        'div': div_avg_field,
        'hu_dadx': hu_dadx_avg_field,
        'au_dhdx': au_dhdx_avg_field,
        'ah_dudx': ah_dudx_avg_field,
        'hv_dady': hv_dady_avg_field,
        'av_dhdy': av_dhdy_avg_field,
        'ah_dvdy': ah_dvdy_avg_field,
        'div2': div2_avg_field
    }

    for var_name in var_fields.keys():
        field_var = tau_dataset.createVariable(var_name, float, ('lat', 'lon'), zlib=True)
        field_var[:] = var_fields[var_name]

    tau_dataset.close()
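# --- Illustrative numerical check (added for exposition; not called by the code above) ---
# The six 'div2' terms are the product-rule expansion of the flux divergence
# d(a*h*u)/dx + d(a*h*v)/dy computed directly in 'div'. On smooth fields the expanded and
# direct forms agree to finite-difference truncation error, which this self-contained
# sketch verifies with synthetic trigonometric fields on a uniform grid:
def _example_product_rule_check():
    x = np.linspace(0.0, 2.0 * np.pi, 200)
    y = np.linspace(0.0, 2.0 * np.pi, 200)
    X, Y = np.meshgrid(x, y)
    a = 0.5 + 0.5 * np.sin(X)  # stand-in for sea ice concentration
    h = 2.0 + np.cos(Y)        # stand-in for sea ice thickness
    u = np.cos(X + Y)          # stand-in for zonal ice velocity

    # d(a*h*u)/dx computed directly vs. via the product rule.
    direct = np.gradient(a * h * u, y, x)[1]
    expanded = (h * u * np.gradient(a, y, x)[1]
                + a * u * np.gradient(h, y, x)[1]
                + a * h * np.gradient(u, y, x)[1])

    interior = (slice(1, -1), slice(1, -1))  # one-sided edge stencils differ more
    assert np.allclose(direct[interior], expanded[interior], atol=1e-2)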