def main():
    """Create a case directory containing the YAML config and run script.

    Reads command-line options, builds a minimal case-configuration dict
    (averaging dates plus CASEROOT/CIMEROOT), resolves the run directory
    via DiagsCase, and — if a directory named after the case does not
    already exist — creates it and writes the YAML file and run script
    into it via make_yaml/make_run_script.
    """
    # Get options
    args = options()
    # construct a dict with essential info
    case_config = {'Avg': {'start_date': args.start_date,
                           'end_date': args.end_date}}
    case_config['Case'] = {'CASEROOT': args.caseroot,
                           'CIMEROOT': args.cimeroot}
    # Create the case instance
    dcase = DiagsCase(case_config['Case'])
    RUNDIR = dcase.get_value('RUNDIR')
    if args.debug:
        print('Run directory is:', RUNDIR)
        print('Casename is:', dcase.casename)
    # record the run directory so downstream diagnostics can locate output
    case_config['Case'].update({'RUNDIR': RUNDIR})
    if not os.path.isdir(dcase.casename):
        print('Creating {}... \n'.format(dcase.casename))
        # os.makedirs replaces the previous os.system('mkdir -p ...'):
        # no shell involved, and failures raise instead of being silently
        # signalled through an ignored exit status.
        os.makedirs(dcase.casename, exist_ok=True)
        make_yaml(dcase.casename, case_config)
        make_run_script(dcase.casename)
    else:
        print('Directory {} already exists. \n'.format(dcase.casename))
    return
def main(stream=False):
    """Drive surface/forcing diagnostics for a MOM6 case.

    Loads the YAML diagnostics config, builds the case and grid objects,
    constructs basin masks, and dispatches to horizontal_mean_diff_rms
    and/or xystats depending on which command-line flags were selected.
    Raises ValueError if no diagnostic was requested.
    """
    # Get options
    args = options()
    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path, 'r'), Loader=yaml.Loader)
    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'], xrformat=True)
    print('Casename is:', dcase.casename)
    RUNDIR = dcase.get_value('RUNDIR')
    # read grid
    grd = MOM6grid(RUNDIR + '/' + dcase.casename + '.mom6.static.nc', xrformat=True)
    # tracer-cell area with land points masked out
    area = grd.area_t.where(grd.wet > 0)
    # Get masking for different regions
    depth = grd.depth_ocean.values
    # remove Nan's, otherwise genBasinMasks won't work
    depth[np.isnan(depth)] = 0.0
    basin_code = genBasinMasks(grd.geolon.values, grd.geolat.values, depth, xda=True)
    # select a few basins, namely, Global, PersianGulf, Arctic, Pacific,
    # Atlantic, Indian, Southern, LabSea and BaffinBay
    basins = basin_code.isel(region=[0, 1, 7, 8, 9, 10, 11, 12, 13])
    if not args.diff_rms and not args.surface and not args.forcing:
        raise ValueError("Please select -diff_rms, -surface and/or -forcing.")
    if args.diff_rms:
        horizontal_mean_diff_rms(grd, dcase, basins, args)
    if args.surface:
        # NOTE(review): `variables` and `fname` are assigned but not passed to
        # xystats here — presumably xystats reads them from `args` or module
        # scope; confirm against xystats' signature.
        variables = ['SSH', 'tos', 'sos', 'mlotst', 'oml']
        fname = '.mom6.sfc_*.nc'
        xystats(grd, dcase, basins, args)
    if args.forcing:
        variables = ['friver', 'ficeberg', 'fsitherm', 'hfsnthermds',
                     'sfdsi', 'hflso', 'seaice_melt_heat', 'wfo', 'hfds',
                     'Heat_PmE']
        fname = '.mom6.hm_*.nc'
        xystats(grd, dcase, basins, args)
    return
def main(stream=False):
    """Compute and plot time-mean poleward heat transport for a MOM6 case.

    Opens the monthly-mean history files (optionally in parallel via dask
    workers), selects the averaging window, computes yearly then time
    means, saves the result to netCDF, extracts the advective, diffusive
    and lateral-boundary-diffusion components, and plots model vs. obs
    via plt_heat_transport_model_vs_obs.
    """
    # Get options
    args = options()
    nw = args.number_of_workers
    if not os.path.isdir('PNG/HT'):
        print('Creating a directory to place figures (PNG/HT)... \n')
        os.system('mkdir -p PNG/HT')
    if not os.path.isdir('ncfiles'):
        print('Creating a directory to place figures (ncfiles)... \n')
        os.system('mkdir ncfiles')
    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path, 'r'), Loader=yaml.Loader)
    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'])
    args.case_name = dcase.casename
    args.savefigs = True; args.outdir = 'PNG/HT'
    RUNDIR = dcase.get_value('RUNDIR')
    print('Run directory is:', RUNDIR)
    print('Casename is:', dcase.casename)
    print('Variables to be processed:', args.variables)
    print('Number of workers to be used:', nw)
    # set avg dates (fall back to the YAML averaging window if not given)
    avg = diag_config_yml['Avg']
    if not args.start_date:
        args.start_date = avg['start_date']
    if not args.end_date:
        args.end_date = avg['end_date']
    # read grid info
    grd = MOM6grid(RUNDIR + '/' + dcase.casename + '.mom6.static.nc')
    depth = grd.depth_ocean
    # remote Nan's, otherwise genBasinMasks won't work
    depth[np.isnan(depth)] = 0.0
    basin_code = m6toolbox.genBasinMasks(grd.geolon, grd.geolat, depth)
    parallel, cluster, client = m6toolbox.request_workers(nw)
    print('Reading dataset...')
    startTime = datetime.now()
    variables = args.variables

    def preprocess(ds):
        ''' Compute montly averages and return the dataset with variables'''
        # any requested variable missing from a file is replaced by a
        # zero-filled DataArray so concatenation across files still works
        for var in variables:
            print('Processing {}'.format(var))
            if var not in ds.variables:
                print('WARNING: ds does not have variable {}. Creating dataarray with zeros'.format(var))
                jm, im = grd.geolat.shape
                tm = len(ds.time)
                da = xr.DataArray(np.zeros((tm, jm, im)), dims=['time', 'yq', 'xh'], \
                                  coords={'yq': grd.yq, 'xh': grd.xh, 'time': ds.time}).rename(var)
                ds = xr.merge([ds, da])
        #return ds[variables].resample(time="1Y", closed='left', \
        #       keep_attrs=True).mean(dim='time', keep_attrs=True)
        return ds[variables]

    if parallel:
        ds = xr.open_mfdataset(RUNDIR + '/' + dcase.casename + '.mom6.hm_*.nc', \
                               parallel=True, data_vars='minimal', chunks={'time': 12}, \
                               coords='minimal', compat='override', preprocess=preprocess)
    else:
        ds = xr.open_mfdataset(RUNDIR + '/' + dcase.casename + '.mom6.hm_*.nc', \
                               data_vars='minimal', coords='minimal', compat='override', \
                               preprocess=preprocess)
    print('Time elasped: ', datetime.now() - startTime)
    print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))
    startTime = datetime.now()
    ds_sel = ds.sel(time=slice(args.start_date, args.end_date))
    print('Time elasped: ', datetime.now() - startTime)
    print('Computing yearly means...')
    startTime = datetime.now()
    ds_sel = ds_sel.resample(time="1Y", closed='left', keep_attrs=True).mean('time', keep_attrs=True)
    print('Time elasped: ', datetime.now() - startTime)
    print('Computing time mean...')
    startTime = datetime.now()
    ds_sel = ds_sel.mean('time').load()
    print('Time elasped: ', datetime.now() - startTime)
    if parallel:
        print('Releasing workers...')
        client.close(); cluster.close()
    varName = 'T_ady_2d'
    print('Saving netCDF files...')
    attrs = {'description': 'Time-mean poleward heat transport by components ',
             'units': ds[varName].units,
             'start_date': args.start_date,
             'end_date': args.end_date,
             'casename': dcase.casename}
    m6toolbox.add_global_attrs(ds_sel, attrs)
    ds_sel.to_netcdf('ncfiles/' + dcase.casename + '_heat_transport.nc')

    # create a ndarray subclass — plain ndarrays cannot carry extra
    # attributes, so views of this subclass are used to attach `.units`
    class C(np.ndarray):
        pass

    if varName in ds.variables:
        tmp = np.ma.masked_invalid(ds_sel[varName].values)
        tmp = tmp[:].filled(0.)
        advective = tmp.view(C)
        advective.units = ds[varName].units
    else:
        # NOTE(review): this message refers to args.infile/args.monthly, which
        # are not set anywhere in this function — verify these attributes
        # exist on args before relying on this error path.
        raise Exception('Could not find "T_ady_2d" in file "%s"' % (args.infile + args.monthly))
    varName = 'T_diffy_2d'
    if varName in ds.variables:
        tmp = np.ma.masked_invalid(ds_sel[varName].values)
        tmp = tmp[:].filled(0.)
        diffusive = tmp.view(C)
        diffusive.units = ds[varName].units
    else:
        diffusive = None
        warnings.warn('Diffusive temperature term not found. This will result in an underestimation of the heat transport.')
    varName = 'T_lbd_diffy_2d'
    if varName in ds.variables:
        tmp = np.ma.masked_invalid(ds_sel[varName].values)
        tmp = tmp[:].filled(0.)
        lbd = tmp.view(C)
        #lbd.units = ds[varName].units
    else:
        lbd = None
        warnings.warn('Lateral boundary mixing term not found. This will result in an underestimation of the heat transport.')
    plt_heat_transport_model_vs_obs(advective, diffusive, lbd, basin_code, grd, args)
    return
def main(stream=False):
    """Compute and plot volume transports through standard ocean sections.

    For each named section, builds a Transport object from the case output
    (skipping, with a warning, any section that cannot be processed),
    optionally saves all transports to a netCDF file, and draws one panel
    per section annotated with the observed ranges from Griffies et al.
    (2016). When ``stream`` is True the figure is returned as a list of
    in-memory PNG buffers instead of being written to disk.
    """
    # Get options
    cmdLineArgs = options()
    if cmdLineArgs.save_ncfile:
        if not os.path.isdir('ncfiles'):
            print('Creating a directory to place figures (ncfiles)... \n')
            os.system('mkdir ncfiles')
    # Read in the yaml file
    diag_config_yml = yaml.load(open(cmdLineArgs.diag_config_yml_path, 'r'), Loader=yaml.Loader)
    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'])
    cmdLineArgs.case_name = dcase.casename
    cmdLineArgs.infile = dcase.get_value('RUNDIR')
    if cmdLineArgs.infile[-1] != '/':
        cmdLineArgs.infile = cmdLineArgs.infile + '/'
    if cmdLineArgs.debug:
        print('Run directory is:', cmdLineArgs.infile)
        print('Casename is:', cmdLineArgs.case_name)
    reference = 'Griffies et al., 2016: OMIP contribution to CMIP6: experimental and diagnostic protocol for the physical component of the Ocean Model Intercomparison Project. Geosci. Model. Dev., 9, 3231-3296. doi:10.5194/gmd-9-3231-2016'
    title = 'Griffies et al., 2016, Geosci. Model. Dev., 9, 3231-3296. doi:10.5194/gmd-9-3231-2016'
    # Observed transport (ranges) in Sv, keyed by section label.
    # BUG FIX: the original dict listed 'Gibraltar Strait' twice (0.11 and
    # (-1.0, 1.0)); Python keeps only the last entry, so the 0.11 value was
    # dead. The duplicate has been removed, keeping the range that was in
    # effect, which also matches the plot's ylim below.
    observedFlows = {
        'reference': reference,
        'title': title,
        'Agulhas': (164.4, 182.2),
        'Barents Opening': 2.0,
        'Bering Strait': (0.7, 1.1),
        'Davis Strait': (-2.1, -1.1),
        'Denmark Strait': (-4.8, -2.0),
        'Drake Passage': (164.4, 182.2),
        'English Channel': (0.01, 0.1),
        'Faroe-Scotland': (0.8, 1.0),
        'Florida-Bahamas': (28.9, 34.3),
        'Fram Strait': (-4.7, 0.7),
        'Gibraltar Strait': (-1.0, 1.0),
        'Iceland-Faroe': (4.35, 4.85),
        'Indonesian Throughflow': (-15., -13.),
        'Mozambique Channel': (-25.6, -7.8),
        'Pacific Equatorial Undercurrent': (24.5, 28.3),
        'Taiwan-Luzon Strait': (-3.0, -1.8),
        'Windward Passage': (-15., 5.)
    }
    # GMM, we need some estimated transport for the following:
    # Bab_al_mandeb_Strait
    # Iceland-Norway
    # Hormuz Strait
    plotSections = []
    # leaving this here to catch if start/end years outside the range of the dataset
    res = Transport(cmdLineArgs, 'agulhas_section', 'umo', label='Agulhas', ylim=(100, 200))
    # Each section is processed independently; a failure in one section
    # only emits a warning and that section is skipped.
    try:
        res = Transport(cmdLineArgs, 'agulhas_section', 'umo', label='Agulhas', ylim=(100, 250))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Agulhas_section')
    try:
        res = Transport(cmdLineArgs, 'Bab_al_mandeb_Strait', 'umo', label='Bab al mandeb Strait', ylim=(-0.5, 0.5))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Bab Al Mandeb Strait')
    try:
        res = Transport(cmdLineArgs, 'Bering_Strait', 'vmo', label='Bering Strait', ylim=(-2, 3))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Bering_Strait')
    try:
        res = Transport(cmdLineArgs, 'Barents_opening', 'vmo', label='Barents Opening', ylim=(-1, 9))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Barents_opening')
    try:
        res = Transport(cmdLineArgs, 'Davis_Strait', 'vmo', label='Davis Strait', ylim=(-5.0, 0.5))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Davis_Strait')
    try:
        res = Transport(cmdLineArgs, 'Denmark_Strait', 'vmo', label='Denmark Strait', ylim=(-12, 2))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Denmark_Strait')
    try:
        res = Transport(cmdLineArgs, 'Drake_Passage', 'umo', label='Drake Passage', ylim=(100, 250))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Drake_Passage')
    try:
        res = Transport(cmdLineArgs, 'English_Channel', 'umo', label='English Channel', ylim=(-0.4, 0.4))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process English_Channel')
    #try: res = Transport(cmdLineArgs,'Faroe_Scotland','umo',label='Faroe-Scotland',ylim=(-5,12)); plotSections.append(res)
    #except: print('WARNING: unable to process Faroe_Scotland')
    try:
        res = Transport(cmdLineArgs, 'Florida_Bahamas', 'vmo', label='Florida-Bahamas', ylim=(5, 35))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Florida_Bahamas')
    try:
        res = Transport(cmdLineArgs, 'Fram_Strait', 'vmo', label='Fram Strait', ylim=(-8, 4))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Fram_Strait')
    try:
        res = Transport(cmdLineArgs, 'Gibraltar_Strait', 'umo', label='Gibraltar Strait', ylim=(-1.0, 1.0))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Gibraltar_Strait')
    try:
        res = Transport(cmdLineArgs, 'Hormuz_Strait', 'umo', label='Hormuz Strait', ylim=(-0.5, 0.5))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Hormuz_Strait')
    #try: res = Transport(cmdLineArgs,['Iceland_Faroe_U','Iceland_Faroe_V'],['umo','vmo'],label='Iceland-Faroe'); plotSections.append(res)
    #except: print('WARNING: unable to process Iceland_Faroe_U and Iceland_Faroe_V')
    try:
        res = Transport(cmdLineArgs, 'Iceland_Norway', 'vmo', label='Iceland-Norway', ylim=(-5, 15))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Iceland_Norway')
    try:
        res = Transport(cmdLineArgs, 'Indonesian_Throughflow', 'vmo', label='Indonesian Throughflow', ylim=(-40, 10))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Indonesian_Throughflow')
    try:
        res = Transport(cmdLineArgs, 'Mozambique_Channel', 'vmo', label='Mozambique Channel', ylim=(-50, 10))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Mozambique_Channel')
    try:
        res = Transport(cmdLineArgs, 'Pacific_undercurrent', 'umo', label='Pacific Equatorial Undercurrent', ylim=None, zlim=(0, 350))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Pacific_undercurrent')
    try:
        res = Transport(cmdLineArgs, 'Taiwan_Luzon', 'umo', label='Taiwan-Luzon Strait', ylim=(-15, 10))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Taiwan_Luzon')
    try:
        res = Transport(cmdLineArgs, 'Windward_Passage', 'vmo', label='Windward Passage', ylim=(-10, 10))
        plotSections.append(res)
    except Exception:
        print('WARNING: unable to process Windward_Passage')

    if cmdLineArgs.save_ncfile:
        print('Saving netCDF file with transports...\n')
        # create a dataaray: one row per section, columns along time
        labels = []
        for n in range(len(plotSections)):
            labels.append(plotSections[n].label)
        var = numpy.zeros((len(plotSections), len(plotSections[0].time)))
        ds = xr.Dataset(data_vars={'transport': (('sections', 'time'), var)},
                        coords={'sections': labels,
                                'time': plotSections[0].time})
        for n in range(len(plotSections)):
            ds.transport.values[n, :] = plotSections[n].data
        ds.to_netcdf('ncfiles/' + cmdLineArgs.case_name + '_section_transports.nc')

    print('Plotting {} sections...\n'.format(len(plotSections)))
    imgbufs = []
    fig = plt.figure(figsize=(13, 17))
    for n in range(0, len(plotSections)):
        plotPanel(plotSections[n], n, observedFlows=observedFlows)
    # NOTE(review): relies on the loop variable n leaking past the loop;
    # case_name is the same for every section, so [n - 1] vs [n] is moot.
    fig.text(0.5, 0.955, str(plotSections[n - 1].case_name),
             horizontalalignment='center', fontsize=14)
    fig.text(0.5, 0.925, 'Observations summarized in ' + observedFlows['title'],
             horizontalalignment='center', fontsize=11)
    plt.subplots_adjust(hspace=0.3)
    if stream is not None:
        fig.text(0.5, 0.05, str('Generated by mom6-tools'),
                 horizontalalignment='center', fontsize=12)
    #plt.show(block=False)
    if stream is True:
        objOut = io.BytesIO()
    else:
        if not os.path.isdir(cmdLineArgs.outdir):
            os.system('mkdir -p ' + cmdLineArgs.outdir)
        objOut = cmdLineArgs.outdir + '/' + cmdLineArgs.case_name + '_section_transports.png'
    plt.savefig(objOut)
    if stream is True:
        imgbufs.append(objOut)
    if stream is True:
        return imgbufs
def main():
    """Compute and plot the meridional overturning circulation (MOC).

    Reads the case output (optionally with dask workers), computes the
    time-mean global and Atlantic MOC from vmo, AMOC time series at 26N
    and 45N against CORE-II / POP / RAPID references, and the
    submesoscale (vhml) and GM (vhGM) induced overturning. Figures are
    written to PNG/MOC and results to ncfiles/<case>_MOC.nc.
    """
    # Get options
    args = options()
    nw = args.number_of_workers
    if not os.path.isdir('PNG/MOC'):
        print('Creating a directory to place figures (PNG/MOC)... \n')
        os.system('mkdir -p PNG/MOC')
    if not os.path.isdir('ncfiles'):
        print('Creating a directory to place figures (ncfiles)... \n')
        os.system('mkdir ncfiles')
    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path, 'r'), Loader=yaml.Loader)
    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'])
    args.case_name = dcase.casename
    args.savefigs = True
    args.outdir = 'PNG/MOC/'
    RUNDIR = dcase.get_value('RUNDIR')
    print('Run directory is:', RUNDIR)
    print('Casename is:', dcase.casename)
    print('Number of workers to be used:', nw)
    # set avg dates (fall back to the YAML averaging window if not given)
    avg = diag_config_yml['Avg']
    if not args.start_date:
        args.start_date = avg['start_date']
    if not args.end_date:
        args.end_date = avg['end_date']
    # read grid info
    grd = MOM6grid(RUNDIR + '/' + dcase.casename + '.mom6.static.nc')
    depth = grd.depth_ocean
    # remote Nan's, otherwise genBasinMasks won't work
    depth[np.isnan(depth)] = 0.0
    basin_code = m6toolbox.genBasinMasks(grd.geolon, grd.geolat, depth)
    parallel, cluster, client = m6toolbox.request_workers(nw)
    print('Reading {} dataset...'.format(args.file_name))
    startTime = datetime.now()

    # load data
    def preprocess(ds):
        # guarantee vmo/vhml/vhGM exist in every file (zero-filled if absent)
        variables = ['vmo', 'vhml', 'vhGM']
        for v in variables:
            if v not in ds.variables:
                ds[v] = xr.zeros_like(ds.vo)
        return ds[variables]

    if parallel:
        ds = xr.open_mfdataset(
            RUNDIR + '/' + dcase.casename + args.file_name,
            parallel=True,
            combine="nested",  # concatenate in order of files
            concat_dim="time",  # concatenate along time
            preprocess=preprocess,
        ).chunk({"time": 12})
    else:
        ds = xr.open_mfdataset(RUNDIR + '/' + dcase.casename + args.file_name, data_vars='minimal', \
                               coords='minimal', compat='override', preprocess=preprocess)
    print('Time elasped: ', datetime.now() - startTime)
    # compute yearly means first since this will be used in the time series
    print('Computing yearly means...')
    startTime = datetime.now()
    ds_yr = ds.resample(time="1Y", closed='left').mean('time')
    print('Time elasped: ', datetime.now() - startTime)
    print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))
    startTime = datetime.now()
    ds_sel = ds_yr.sel(time=slice(args.start_date, args.end_date))
    print('Time elasped: ', datetime.now() - startTime)
    print('Computing time mean...')
    startTime = datetime.now()
    ds_mean = ds_sel.mean('time').compute()
    print('Time elasped: ', datetime.now() - startTime)

    # create a ndarray subclass — used to attach a `.units` attribute
    class C(np.ndarray):
        pass

    varName = 'vmo'
    conversion_factor = 1.e-9  # kg/s -> Sv
    tmp = np.ma.masked_invalid(ds_mean[varName].values)
    tmp = tmp[:].filled(0.)
    VHmod = tmp.view(C)
    VHmod.units = ds[varName].units
    Zmod = m6toolbox.get_z(ds, depth, varName)  # same here
    if args.case_name != '':
        case_name = args.case_name
    else:
        case_name = ''
    # Global MOC
    m6plot.setFigureSize([16, 9], 576, debug=False)
    axis = plt.gca()
    cmap = plt.get_cmap('dunnePM')
    zg = Zmod.min(axis=-1)
    psiPlot = MOCpsi(VHmod) * conversion_factor
    # average adjacent interfaces onto layer centers
    psiPlot = 0.5 * (psiPlot[0:-1, :] + psiPlot[1::, :])
    yyg = grd.geolat_c[:, :].max(axis=-1) + 0 * zg
    ci = m6plot.pmCI(0., 40., 5.)
    plotPsi(yyg, zg, psiPlot, ci,
            'Global MOC [Sv],' + 'averaged between ' + args.start_date + ' and ' + args.end_date)
    plt.xlabel(r'Latitude [$\degree$N]')
    plt.suptitle(case_name)
    plt.gca().invert_yaxis()
    findExtrema(yyg, zg, psiPlot, max_lat=-30.)
    findExtrema(yyg, zg, psiPlot, min_lat=25., min_depth=250.)
    findExtrema(yyg, zg, psiPlot, min_depth=2000., mult=-1.)
    objOut = args.outdir + str(case_name) + '_MOC_global.png'
    plt.savefig(objOut)
    if 'zl' in ds:
        zl = ds.zl.values
    elif 'z_l' in ds:
        zl = ds.z_l.values
    else:
        raise ValueError("Dataset does not have vertical coordinate zl or z_l")
    # create dataset to store results
    moc = xr.Dataset(data_vars={
        'moc': (('zl', 'yq'), psiPlot),
        'amoc': (('zl', 'yq'), np.zeros((psiPlot.shape))),
        'moc_FFM': (('zl', 'yq'), np.zeros((psiPlot.shape))),
        'moc_GM': (('zl', 'yq'), np.zeros((psiPlot.shape))),
        'amoc_45': (('time'), np.zeros((ds_yr.time.shape))),
        'moc_GM_ACC': (('time'), np.zeros((ds_yr.time.shape))),
        'amoc_26': (('time'), np.zeros((ds_yr.time.shape)))
    },
        coords={'zl': zl, 'yq': ds.yq, 'time': ds_yr.time})
    attrs = {
        'description': 'MOC time-mean sections and time-series',
        'units': 'Sv',
        'start_date': avg['start_date'],
        'end_date': avg['end_date'],
        'casename': dcase.casename
    }
    m6toolbox.add_global_attrs(moc, attrs)
    # Atlantic MOC
    m6plot.setFigureSize([16, 9], 576, debug=False)
    cmap = plt.get_cmap('dunnePM')
    # m: Atlantic mask built from basin codes
    m = 0 * basin_code
    m[(basin_code == 2) | (basin_code == 4) | (basin_code == 6) |
      (basin_code == 7) | (basin_code == 8)] = 1
    ci = m6plot.pmCI(0., 22., 2.)
    z = (m * Zmod).min(axis=-1)
    psiPlot = MOCpsi(VHmod, vmsk=m * np.roll(m, -1, axis=-2)) * conversion_factor
    psiPlot = 0.5 * (psiPlot[0:-1, :] + psiPlot[1::, :])
    yy = grd.geolat_c[:, :].max(axis=-1) + 0 * z
    plotPsi(yy, z, psiPlot, ci,
            'Atlantic MOC [Sv],' + 'averaged between ' + args.start_date + ' and ' + args.end_date)
    plt.xlabel(r'Latitude [$\degree$N]')
    plt.suptitle(case_name)
    plt.gca().invert_yaxis()
    findExtrema(yy, z, psiPlot, min_lat=26.5, max_lat=27., min_depth=250.)  # RAPID
    findExtrema(yy, z, psiPlot, max_lat=-33.)
    findExtrema(yy, z, psiPlot)
    findExtrema(yy, z, psiPlot, min_lat=5.)
    objOut = args.outdir + str(case_name) + '_MOC_Atlantic.png'
    plt.savefig(objOut, format='png')
    moc['amoc'].data = psiPlot
    print('Plotting AMOC profile at 26N...')
    rapid_vertical = xr.open_dataset(
        '/glade/work/gmarques/cesm/datasets/RAPID/moc_vertical.nc')
    fig, ax = plt.subplots(nrows=1, ncols=1)
    ax.plot(rapid_vertical.stream_function_mar.mean('time'),
            rapid_vertical.depth, 'k', label='RAPID')
    ax.plot(moc['amoc'].sel(yq=26, method='nearest'), zl, label=case_name)
    ax.legend()
    plt.gca().invert_yaxis()
    plt.grid()
    ax.set_xlabel('AMOC @ 26N [Sv]')
    ax.set_ylabel('Depth [m]')
    objOut = args.outdir + str(case_name) + '_MOC_profile_26N.png'
    plt.savefig(objOut, format='png')
    print('Computing time series...')
    startTime = datetime.now()
    # time-series
    dtime = ds_yr.time
    amoc_26 = np.zeros(len(dtime))
    amoc_45 = np.zeros(len(dtime))
    moc_GM_ACC = np.zeros(len(dtime))
    if args.debug:
        startTime = datetime.now()
    # loop in time
    for t in range(len(dtime)):
        tmp = np.ma.masked_invalid(ds_yr[varName][t, :].values)
        tmp = tmp[:].filled(0.)
        # m is still Atlantic ocean
        psi = MOCpsi(tmp, vmsk=m * np.roll(m, -1, axis=-2)) * conversion_factor
        psi = 0.5 * (psi[0:-1, :] + psi[1::, :])
        amoc_26[t] = findExtrema(yy, z, psi, min_lat=26., max_lat=27.,
                                 plot=False, min_depth=250.)
        amoc_45[t] = findExtrema(yy, z, psi, min_lat=44., max_lat=46.,
                                 plot=False, min_depth=250.)
        tmp_GM = np.ma.masked_invalid(ds_yr['vhGM'][t, :].values)
        tmp_GM = tmp_GM[:].filled(0.)
        psiGM = MOCpsi(tmp_GM) * conversion_factor
        psiGM = 0.5 * (psiGM[0:-1, :] + psiGM[1::, :])
        moc_GM_ACC[t] = findExtrema(yyg, zg, psiGM, min_lat=-65., max_lat=-30,
                                    mult=-1., plot=False)
    print('Time elasped: ', datetime.now() - startTime)
    # add dataarays to the moc dataset
    moc['amoc_26'].data = amoc_26
    moc['amoc_45'].data = amoc_45
    moc['moc_GM_ACC'].data = moc_GM_ACC
    if parallel:
        print('Releasing workers ...')
        client.close()
        cluster.close()
    print('Plotting...')
    # load AMOC time series data (5th) cycle used in Danabasoglu et al., doi:10.1016/j.ocemod.2015.11.007
    path = '/glade/p/cesm/omwg/amoc/COREII_AMOC_papers/papers/COREII.variability/data.original/'
    amoc_core_26 = xr.open_dataset(path + 'AMOCts.cyc5.26p5.nc')
    # load AMOC from POP JRA-55
    amoc_pop_26 = xr.open_dataset(
        '/glade/u/home/bryan/MOM6-modeloutputanalysis/'
        'AMOC_series_26n.g210.GIAF_JRA.v13.gx1v7.01.nc')
    # load RAPID time series
    rapid = xr.open_dataset(
        '/glade/work/gmarques/cesm/datasets/RAPID/moc_transports.nc').resample(
            time="1Y", closed='left', keep_attrs=True).mean('time', keep_attrs=True)
    # plot
    fig = plt.figure(figsize=(12, 6))
    plt.plot(np.arange(len(moc.time)) + 1958.5,
             moc['amoc_26'].values, color='k', label=case_name, lw=2)
    # core data
    core_mean = amoc_core_26['MOC'].mean(axis=0).data
    core_std = amoc_core_26['MOC'].std(axis=0).data
    plt.plot(amoc_core_26.time, core_mean, 'k',
             label='CORE II (group mean)', color='#1B2ACC', lw=1)
    plt.fill_between(amoc_core_26.time, core_mean - core_std,
                     core_mean + core_std, alpha=0.25, edgecolor='#1B2ACC',
                     facecolor='#089FFF')
    # pop data
    plt.plot(np.arange(len(amoc_pop_26.time)) + 1958.5,
             amoc_pop_26.AMOC_26n.values, color='r', label='POP', lw=1)
    # rapid
    plt.plot(np.arange(len(rapid.time)) + 2004.5,
             rapid.moc_mar_hc10.values, color='green', label='RAPID', lw=1)
    plt.title('AMOC @ 26 $^o$ N', fontsize=16)
    plt.ylim(5, 20)
    plt.xlim(1948, 1958.5 + len(moc.time))
    plt.xlabel('Time [years]', fontsize=16)
    plt.ylabel('Sv', fontsize=16)
    plt.legend(fontsize=13, ncol=2)
    objOut = args.outdir + str(case_name) + '_MOC_26N_time_series.png'
    plt.savefig(objOut, format='png')
    amoc_core_45 = xr.open_dataset(path + 'AMOCts.cyc5.45.nc')
    amoc_pop_45 = xr.open_dataset(
        '/glade/u/home/bryan/MOM6-modeloutputanalysis/'
        'AMOC_series_45n.g210.GIAF_JRA.v13.gx1v7.01.nc')
    # plot
    fig = plt.figure(figsize=(12, 6))
    plt.plot(np.arange(len(moc.time)) + 1958.5,
             moc['amoc_45'], color='k', label=case_name, lw=2)
    # core data
    core_mean = amoc_core_45['MOC'].mean(axis=0).data
    core_std = amoc_core_45['MOC'].std(axis=0).data
    plt.plot(amoc_core_45.time, core_mean, 'k',
             label='CORE II (group mean)', color='#1B2ACC', lw=2)
    plt.fill_between(amoc_core_45.time, core_mean - core_std,
                     core_mean + core_std, alpha=0.25, edgecolor='#1B2ACC',
                     facecolor='#089FFF')
    # pop data
    plt.plot(np.arange(len(amoc_pop_45.time)) + 1958.5,
             amoc_pop_45.AMOC_45n.values, color='r', label='POP', lw=1)
    plt.title('AMOC @ 45 $^o$ N', fontsize=16)
    plt.ylim(5, 20)
    plt.xlim(1948, 1958 + len(moc.time))
    plt.xlabel('Time [years]', fontsize=16)
    plt.ylabel('Sv', fontsize=16)
    plt.legend(fontsize=14)
    objOut = args.outdir + str(case_name) + '_MOC_45N_time_series.png'
    plt.savefig(objOut, format='png')

    # Submesoscale-induced Global MOC
    class C(np.ndarray):
        pass

    varName = 'vhml'
    conversion_factor = 1.e-9
    tmp = np.ma.masked_invalid(ds_mean[varName].values)
    tmp = tmp[:].filled(0.)
    VHml = tmp.view(C)
    VHml.units = ds[varName].units
    Zmod = m6toolbox.get_z(ds, depth, varName)  # same here
    m6plot.setFigureSize([16, 9], 576, debug=False)
    axis = plt.gca()
    cmap = plt.get_cmap('dunnePM')
    z = Zmod.min(axis=-1)
    psiPlot = MOCpsi(VHml) * conversion_factor
    psiPlot = 0.5 * (psiPlot[0:-1, :] + psiPlot[1::, :])
    yy = grd.geolat_c[:, :].max(axis=-1) + 0 * z
    ci = m6plot.pmCI(0., 20., 2.)
    plotPsi(yy, z, psiPlot, ci,
            'Global FFH MOC [Sv],' + 'averaged between ' + args.start_date + ' and ' + args.end_date,
            zval=[0., -400., -1000.])
    plt.xlabel(r'Latitude [$\degree$N]')
    plt.suptitle(case_name)
    plt.gca().invert_yaxis()
    objOut = args.outdir + str(case_name) + '_FFH_MOC_global.png'
    plt.savefig(objOut)
    moc['moc_FFM'].data = psiPlot

    # GM-induced Global MOC
    class C(np.ndarray):
        pass

    varName = 'vhGM'
    conversion_factor = 1.e-9
    tmp = np.ma.masked_invalid(ds_mean[varName].values)
    tmp = tmp[:].filled(0.)
    VHGM = tmp.view(C)
    VHGM.units = ds[varName].units
    Zmod = m6toolbox.get_z(ds, depth, varName)  # same here
    m6plot.setFigureSize([16, 9], 576, debug=False)
    axis = plt.gca()
    cmap = plt.get_cmap('dunnePM')
    z = Zmod.min(axis=-1)
    psiPlot = MOCpsi(VHGM) * conversion_factor
    psiPlot = 0.5 * (psiPlot[0:-1, :] + psiPlot[1::, :])
    yy = grd.geolat_c[:, :].max(axis=-1) + 0 * z
    ci = m6plot.pmCI(0., 20., 2.)
    plotPsi(yy, z, psiPlot, ci,
            'Global GM MOC [Sv],' + 'averaged between ' + args.start_date + ' and ' + args.end_date)
    plt.xlabel(r'Latitude [$\degree$N]')
    plt.suptitle(case_name)
    plt.gca().invert_yaxis()
    findExtrema(yy, z, psiPlot, min_lat=-65., max_lat=-30, mult=-1.)
    objOut = args.outdir + str(case_name) + '_GM_MOC_global.png'
    plt.savefig(objOut)
    moc['moc_GM'].data = psiPlot
    print('Saving netCDF files...')
    moc.to_netcdf('ncfiles/' + str(case_name) + '_MOC.nc')
    return
def driver(args):
    """Compute temperature and salinity biases at depth levels vs. obs.

    Reads the case history files, averages over the requested window,
    differences model thetao/so against PHC2 or WOA18 climatologies,
    computes area-weighted per-basin statistics (min/max/mean/std/rms),
    saves stats and time-mean fields to netCDF, and produces global,
    Antarctic and Arctic comparison plots.
    """
    nw = args.number_of_workers
    if not os.path.isdir('PNG/TS_levels'):
        print('Creating a directory to place figures (PNG)... \n')
        os.system('mkdir -p PNG/TS_levels')
    if not os.path.isdir('ncfiles'):
        print('Creating a directory to place netCDF files (ncfiles)... \n')
        os.system('mkdir ncfiles')
    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path, 'r'), Loader=yaml.Loader)
    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'])
    RUNDIR = dcase.get_value('RUNDIR')
    args.casename = dcase.casename
    print('Run directory is:', RUNDIR)
    print('Casename is:', args.casename)
    print('Number of workers: ', nw)
    # set avg dates (fall back to the YAML averaging window if not given)
    avg = diag_config_yml['Avg']
    if not args.start_date:
        args.start_date = avg['start_date']
    if not args.end_date:
        args.end_date = avg['end_date']
    # read grid info (both plain and xarray formats are needed below)
    grd = MOM6grid(RUNDIR + '/' + args.casename + '.mom6.static.nc')
    grd_xr = MOM6grid(RUNDIR + '/' + args.casename + '.mom6.static.nc', xrformat=True)
    # create masks
    depth = grd.depth_ocean
    # remote Nan's, otherwise genBasinMasks won't work
    depth[np.isnan(depth)] = 0.0
    basin_code = genBasinMasks(grd.geolon, grd.geolat, depth, xda=True)
    # TODO: improve how obs are selected
    if args.obs == 'PHC2':
        # load PHC2 data
        obs_path = '/glade/p/cesm/omwg/obs_data/phc/'
        obs_temp = xr.open_mfdataset(obs_path + 'PHC2_TEMP_tx0.66v1_34lev_ann_avg.nc',
                                     decode_coords=False, decode_times=False)
        obs_salt = xr.open_mfdataset(obs_path + 'PHC2_SALT_tx0.66v1_34lev_ann_avg.nc',
                                     decode_coords=False, decode_times=False)
    elif args.obs == 'WOA18':
        # load WOA18 data
        obs_path = '/glade/u/home/gmarques/Notebooks/CESM_MOM6/WOA18_remapping/'
        obs_temp = xr.open_dataset(
            obs_path + 'WOA18_TEMP_tx0.66v1_34lev_ann_avg.nc',
            decode_times=False).rename({'theta0': 'TEMP'})
        obs_salt = xr.open_dataset(
            obs_path + 'WOA18_SALT_tx0.66v1_34lev_ann_avg.nc',
            decode_times=False).rename({'s_an': 'SALT'})
    else:
        raise ValueError("The obs selected is not available.")
    parallel, cluster, client = request_workers(nw)
    print('Reading surface dataset...')
    startTime = datetime.now()
    variables = ['thetao', 'so', 'time', 'time_bnds']

    def preprocess(ds):
        ''' Compute montly averages and return the dataset with variables'''
        return ds[variables]  #.resample(time="1Y", closed='left', \
        #keep_attrs=True).mean(dim='time', keep_attrs=True)

    if parallel:
        ds = xr.open_mfdataset(RUNDIR + '/' + dcase.casename + '.mom6.h_*.nc', \
                               parallel=True, data_vars='minimal', \
                               coords='minimal', compat='override', preprocess=preprocess)
    else:
        ds = xr.open_mfdataset(RUNDIR + '/' + dcase.casename + '.mom6.h_*.nc', \
                               data_vars='minimal', coords='minimal', compat='override',
                               preprocess=preprocess)
    print('Time elasped: ', datetime.now() - startTime)
    print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))
    startTime = datetime.now()
    ds = ds.sel(time=slice(args.start_date, args.end_date))
    print('Time elasped: ', datetime.now() - startTime)
    print('\n Computing yearly means...')
    startTime = datetime.now()
    ds = ds.resample(time="1Y", closed='left', keep_attrs=True).mean('time', keep_attrs=True)
    print('Time elasped: ', datetime.now() - startTime)
    print('Time averaging...')
    startTime = datetime.now()
    temp = np.ma.masked_invalid(ds.thetao.mean('time').values)
    salt = np.ma.masked_invalid(ds.so.mean('time').values)
    print('Time elasped: ', datetime.now() - startTime)
    print('Computing stats for different basins...')
    startTime = datetime.now()
    # construct a 3D area with land values masked
    area = np.ma.masked_where(grd.wet == 0, grd.area_t)
    tmp = np.repeat(area[np.newaxis, :, :], len(obs_temp.depth), axis=0)
    area_mom3D = xr.DataArray(tmp, dims=('depth', 'yh', 'xh'),
                              coords={'depth': obs_temp.depth.values,
                                      'yh': grd.yh, 'xh': grd.xh})
    # mask out cells shallower than each observation depth level
    for k in range(len(area_mom3D.depth)):
        area_mom3D[k, :] = grd_xr.area_t.where(
            grd_xr.depth_ocean >= area_mom3D.depth[k])
    # temp
    thetao_mean = ds.thetao.mean('time')
    temp_diff = thetao_mean.rename({'z_l': 'depth'}).rename('TEMP') - obs_temp['TEMP']
    temp_stats = myStats_da(temp_diff, area_mom3D,
                            basins=basin_code).rename('thetao_bias_stats')
    # salt
    so_mean = ds.so.mean('time')
    salt_diff = so_mean.rename({'z_l': 'depth'}).rename('SALT') - obs_salt['SALT']
    salt_stats = myStats_da(salt_diff, area_mom3D,
                            basins=basin_code).rename('so_bias_stats')
    # plots
    depth = temp_stats.depth.values
    basin = temp_stats.basin.values
    # reconstruct layer interfaces from layer-center depths
    interfaces = np.zeros(len(depth) + 1)
    for k in range(1, len(depth) + 1):
        interfaces[k] = interfaces[k - 1] + (2 * (depth[k - 1] - interfaces[k - 1]))
    reg = np.arange(len(temp_stats.basin.values) + 1)
    figname = 'PNG/TS_levels/' + str(dcase.casename) + '_'
    temp_label = r'Potential temperature [$^o$C]'
    salt_label = 'Salinity [psu]'
    # minimum (stats index 0 along the second dim)
    score_plot2(basin, interfaces, temp_stats[:, 0, :], nbins=30,
                cmap=plt.cm.viridis, cmin=temp_stats[:, 0, :].min().values,
                units=temp_label, fname=figname + 'thetao_bias_min.png',
                title='Minimun temperature difference (model-{})'.format(args.obs))
    score_plot2(basin, interfaces, salt_stats[:, 0, :], nbins=30,
                cmap=plt.cm.viridis, cmin=salt_stats[:, 0, :].min().values,
                units=salt_label, fname=figname + 'so_bias_min.png',
                title='Minimun salinity difference (model-{})'.format(args.obs))
    # maximum (index 1)
    score_plot2(basin, interfaces, temp_stats[:, 1, :], nbins=30,
                cmap=plt.cm.viridis, cmin=temp_stats[:, 1, :].min().values,
                units=temp_label, fname=figname + 'thetao_bias_max.png',
                title='Maximum temperature difference (model-{})'.format(args.obs))
    score_plot2(basin, interfaces, salt_stats[:, 1, :], nbins=30,
                cmap=plt.cm.viridis, cmin=salt_stats[:, 1, :].min().values,
                units=salt_label, fname=figname + 'so_bias_max.png',
                title='Maximum salinity difference (model-{})'.format(args.obs))
    # mean (index 2)
    score_plot2(basin, interfaces, temp_stats[:, 2, :], nbins=30,
                cmap=plt.cm.seismic, units=temp_label,
                fname=figname + 'thetao_bias_mean.png',
                title='Mean temperature difference (model-{})'.format(args.obs))
    score_plot2(basin, interfaces, salt_stats[:, 2, :], nbins=30,
                cmap=plt.cm.seismic, units=salt_label,
                fname=figname + 'so_bias_mean.png',
                title='Mean salinity difference (model-{})'.format(args.obs))
    # std (index 3)
    score_plot2(basin, interfaces, temp_stats[:, 3, :], nbins=30,
                cmap=plt.cm.viridis, cmin=1.0E-15, units=temp_label,
                fname=figname + 'thetao_bias_std.png',
                title='Std temperature difference (model-{})'.format(args.obs))
    score_plot2(basin, interfaces, salt_stats[:, 3, :], nbins=30,
                cmap=plt.cm.viridis, cmin=1.0E-15, units=salt_label,
                fname=figname + 'so_bias_std.png',
                title='Std salinity difference (model-{})'.format(args.obs))
    # rms (index 4)
    score_plot2(basin, interfaces, temp_stats[:, 4, :], nbins=30,
                cmap=plt.cm.viridis, cmin=1.0E-15, units=temp_label,
                fname=figname + 'thetao_bias_rms.png',
                title='Rms temperature difference (model-{})'.format(args.obs))
    score_plot2(basin, interfaces, salt_stats[:, 4, :], nbins=30,
                cmap=plt.cm.viridis, cmin=1.0E-15, units=salt_label,
                fname=figname + 'so_bias_rms.png',
                title='Rms salinity difference (model-{})'.format(args.obs))
    print('Time elasped: ', datetime.now() - startTime)
    print('Saving netCDF files...')
    startTime = datetime.now()
    attrs = {
        'description': 'model - obs at depth levels',
        'start_date': args.start_date,
        'end_date': args.end_date,
        'casename': dcase.casename,
        'obs': args.obs,
        'module': os.path.basename(__file__)
    }
    # create dataset to store results
    add_global_attrs(temp_stats, attrs)
    temp_stats.to_netcdf('ncfiles/' + str(args.casename) + '_thetao_bias_ann_mean_stats.nc')
    add_global_attrs(salt_stats, attrs)
    salt_stats.to_netcdf('ncfiles/' + str(args.casename) + '_so_bias_ann_mean_stats.nc')
    thetao = xr.DataArray(thetao_mean, dims=['z_l', 'yh', 'xh'],
                          coords={'z_l': ds.z_l, 'yh': grd.yh,
                                  'xh': grd.xh}).rename('thetao')
    temp_bias = np.ma.masked_invalid(thetao.values - obs_temp['TEMP'].values)
    ds_thetao = xr.Dataset(data_vars={
        'thetao': (('z_l', 'yh', 'xh'), thetao),
        'thetao_bias': (('z_l', 'yh', 'xh'), temp_bias)
    },
        coords={'z_l': ds.z_l, 'yh': grd.yh, 'xh': grd.xh})
    add_global_attrs(ds_thetao, attrs)
    ds_thetao.to_netcdf('ncfiles/' + str(args.casename) + '_thetao_time_mean.nc')
    so = xr.DataArray(ds.so.mean('time'), dims=['z_l', 'yh', 'xh'],
                      coords={'z_l': ds.z_l, 'yh': grd.yh,
                              'xh': grd.xh}).rename('so')
    salt_bias = np.ma.masked_invalid(so.values - obs_salt['SALT'].values)
    ds_so = xr.Dataset(data_vars={
        'so': (('z_l', 'yh', 'xh'), so),
        'so_bias': (('z_l', 'yh', 'xh'), salt_bias)
    },
        coords={'z_l': ds.z_l, 'yh': grd.yh, 'xh': grd.xh})
    add_global_attrs(ds_so, attrs)
    ds_so.to_netcdf('ncfiles/' + str(args.casename) + '_so_time_mean.nc')
    print('Time elasped: ', datetime.now() - startTime)
    if parallel:
        print('\n Releasing workers...')
        client.close()
        cluster.close()
    print('Global plots...')
    km = len(obs_temp['depth'])
    for k in range(km):
        if ds['z_l'][k].values < 1200.0:
            figname = 'PNG/TS_levels/' + str(dcase.casename) + '_' + str(
                ds['z_l'][k].values) + '_'
            temp_obs = np.ma.masked_invalid(obs_temp['TEMP'][k, :].values)
            xycompare(temp[k, :], temp_obs, grd.geolon, grd.geolat,
                      area=grd.area_t,
                      title1='model temperature, depth =' + str(ds['z_l'][k].values) + 'm',
                      title2='observed temperature, depth =' + str(obs_temp['depth'][k].values) + 'm',
                      suptitle=dcase.casename + ', averaged ' + str(args.start_date) + ' to ' + str(args.end_date),
                      extend='both', dextend='neither', clim=(-1.9, 30.),
                      dlim=(-2, 2), dcolormap=plt.cm.bwr,
                      save=figname + 'global_temp.png')
            salt_obs = np.ma.masked_invalid(obs_salt['SALT'][k, :].values)
            xycompare(salt[k, :], salt_obs, grd.geolon, grd.geolat,
                      area=grd.area_t,
                      title1='model salinity, depth =' + str(ds['z_l'][k].values) + 'm',
                      title2='observed salinity, depth =' + str(obs_temp['depth'][k].values) + 'm',
                      suptitle=dcase.casename + ', averaged ' + str(args.start_date) + ' to ' + str(args.end_date),
                      extend='both', dextend='neither', clim=(30., 39.),
                      dlim=(-2, 2), dcolormap=plt.cm.bwr,
                      save=figname + 'global_salt.png')
    print('Antarctic plots...')
    # NOTE(review): the Antarctic and Arctic loops below do not reset
    # `figname` per level, so every save reuses the figname left over from
    # the last iteration of the global loop — later levels appear to
    # overwrite earlier ones; verify this is intended.
    for k in range(km):
        if (ds['z_l'][k].values < 1200.):
            temp_obs = np.ma.masked_invalid(obs_temp['TEMP'][k, :].values)
            polarcomparison(temp[k, :], temp_obs, grd,
                            title1='model temperature, depth =' + str(ds['z_l'][k].values) + 'm',
                            title2='observed temperature, depth =' + str(obs_temp['depth'][k].values) + 'm',
                            extend='both', dextend='neither', clim=(-1.9, 10.5),
                            dlim=(-2, 2), dcolormap=plt.cm.bwr,
                            suptitle=dcase.casename + ', averaged ' + str(args.start_date) + ' to ' + str(args.end_date),
                            proj='SP', save=figname + 'antarctic_temp.png')
            salt_obs = np.ma.masked_invalid(obs_salt['SALT'][k, :].values)
            polarcomparison(salt[k, :], salt_obs, grd,
                            title1='model salinity, depth =' + str(ds['z_l'][k].values) + 'm',
                            title2='observed salinity, depth =' + str(obs_temp['depth'][k].values) + 'm',
                            extend='both', dextend='neither', clim=(33., 35.),
                            dlim=(-2, 2), dcolormap=plt.cm.bwr,
                            suptitle=dcase.casename + ', averaged ' + str(args.start_date) + ' to ' + str(args.end_date),
                            proj='SP', save=figname + 'antarctic_salt.png')
    print('Arctic plots...')
    for k in range(km):
        if (ds['z_l'][k].values < 100.):
            temp_obs = np.ma.masked_invalid(obs_temp['TEMP'][k, :].values)
            polarcomparison(temp[k, :], temp_obs, grd,
                            title1='model temperature, depth =' + str(ds['z_l'][k].values) + 'm',
                            title2='observed temperature, depth =' + str(obs_temp['depth'][k].values) + 'm',
                            extend='both', dextend='neither', clim=(-1.9, 11.5),
                            dlim=(-2, 2), dcolormap=plt.cm.bwr,
                            suptitle=dcase.casename + ', averaged ' + str(args.start_date) + ' to ' + str(args.end_date),
                            proj='NP', save=figname + 'arctic_temp.png')
            salt_obs = np.ma.masked_invalid(obs_salt['SALT'][k, :].values)
            polarcomparison(salt[k, :], salt_obs, grd,
                            title1='model salinity, depth =' + str(ds['z_l'][k].values) + 'm',
                            title2='observed salinity, depth =' + str(obs_temp['depth'][k].values) + 'm',
                            extend='both', dextend='neither', clim=(31.5, 35.),
                            dlim=(-2, 2), dcolormap=plt.cm.bwr,
                            suptitle=dcase.casename + ', averaged ' + str(args.start_date) + ' to ' + str(args.end_date),
                            proj='NP', save=figname + 'arctic_salt.png')
return
def main(stream=False):
    """Driver for the surface / forcing / time-series diagnostics.

    Reads the command-line options and the yaml configuration file, builds
    the MOM6 grid and basin masks, then dispatches to the requested
    diagnostic routines (``horizontal_mean_diff_rms``, ``xystats``,
    ``extract_time_series``).

    Parameters
    ----------
    stream : bool, optional
        Unused in this function body — presumably a hook for streaming
        output; TODO confirm against callers.

    Raises
    ------
    ValueError
        If none of the diagnostic flags was selected on the command line.
    """
    # Get options
    args = options()

    # Fail early if no diagnostic was requested.
    if not args.diff_rms and not args.surface and not args.forcing and not args.time_series:
        raise ValueError(
            "Please select -diff_rms, -time_series, -surface and/or -forcing.")

    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path, 'r'),
                                Loader=yaml.Loader)

    # set avg dates; command-line values, when given, take precedence over yaml
    avg = diag_config_yml['Avg']
    if not args.start_date:
        args.start_date = avg['start_date']
    if not args.end_date:
        args.end_date = avg['end_date']

    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'], xrformat=True)
    print('Casename is:', dcase.casename)
    RUNDIR = dcase.get_value('RUNDIR')

    # Create output directories for figures and netCDF files if needed.
    if not os.path.isdir('PNG/Horizontal_mean_biases'):
        print('Creating a directory to place figures (PNG)... \n')
        os.system('mkdir -p PNG/Horizontal_mean_biases')
    if not os.path.isdir('ncfiles'):
        print('Creating a directory to place netCDF files (ncfiles)... \n')
        os.system('mkdir ncfiles')

    # read grid
    grd = MOM6grid(RUNDIR + '/' + dcase.casename + '.mom6.static.nc',
                   xrformat=True)
    # ocean-only cell areas (land cells masked out via the wet mask)
    area = grd.area_t.where(grd.wet > 0)

    # Get masking for different regions
    depth = grd.depth_ocean.values
    # remove Nan's, otherwise genBasinMasks won't work
    depth[np.isnan(depth)] = 0.0
    basin_code = genBasinMasks(grd.geolon.values, grd.geolat.values, depth,
                               xda=True)

    # select a few basins, namely, Global, MedSea, BalticSea, HudsonBay, Arctic,
    # Pacific, Atlantic, Indian, Southern, LabSea and BaffinBay
    basins = basin_code.isel(region=[0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13])

    if args.diff_rms:
        horizontal_mean_diff_rms(grd, dcase, basins, args)

    if args.surface:
        #variables = ['SSH','tos','sos','mlotst','oml','speed', 'SSU', 'SSV']
        variables = ['SSH', 'tos', 'sos', 'mlotst', 'oml', 'speed']
        fname = '.mom6.hm_*.nc'
        xystats(fname, variables, grd, dcase, basins, args)

    if args.forcing:
        variables = [
            'friver', 'ficeberg', 'fsitherm', 'hfsnthermds', 'sfdsi', 'hflso',
            'seaice_melt_heat', 'wfo', 'hfds', 'Heat_PmE'
        ]
        fname = '.mom6.hm_*.nc'
        xystats(fname, variables, grd, dcase, basins, args)

    if args.time_series:
        variables = ['thetaoga', 'soga']
        fname = '.mom6.hm_*.nc'
        extract_time_series(fname, variables, grd, dcase, args)

    return
def __init__(self, climo_config: OrderedDict, diags_case: DiagsCase):
    """Construct a climatology/average configuration bound to a case.

    Parameters
    ----------
    climo_config : OrderedDict
        A dictionary containing the configuration for average or climatology
        data to generate. This is usually read from a yaml file but may also
        be generated manually. An example yaml file to generate this
        dictionary:

        Climo:
          type: "avg"            # "climo" or "avg"
          date0: "0001-01-01"
          date1: "0005-12-31"
          fields: ["thetao","so"] # fields to average
          freqs: ["1Y", "5Y"]     # 1- and 5-yearly averages

    diags_case : mom6_tools.DiagsCase
        Case instance.

    Raises
    ------
    RuntimeError
        If required entries are missing, the calendar is unknown, or the
        type is not one of the allowable values.
    AssertionError
        If date0/date1 do not match the YYYY-MM-DD format.
    """
    self._climo_config = climo_config
    self.diags_case = diags_case

    # check if all the required entries are provided in climo_config:
    if len(self.required_entries - set(self._climo_config)) > 0:
        raise RuntimeError("Must provide all required fields:" +
                           str(self.required_entries))

    self._fields = self._climo_config['fields']

    # Raw string: "\d" in a plain string literal is an invalid escape
    # sequence (warning today, error in future Python versions).
    date_regex = r"^\d{4}-\d{2}-\d{2}$"
    self._date0 = None
    self._date1 = None

    # Map the case calendar onto the matching cftime datetime constructor.
    calendar = diags_case.get_value("CALENDAR")
    if calendar == "NO_LEAP":
        cft_datetime_constr = cft.DatetimeNoLeap
    elif calendar == "GREGORIAN":
        cft_datetime_constr = cft.DatetimeGregorian
    else:
        raise RuntimeError(f"Unknown calendar type: {calendar}")

    allowable_types = ['avg', 'climo']
    if self._climo_config['type'] not in allowable_types:
        raise RuntimeError("The type must be set to one of: " +
                           str(allowable_types))

    if 'date0' in self._climo_config:
        date0_str = self._climo_config['date0']
        # NOTE: assert is stripped under `python -O`; kept as-is to preserve
        # the AssertionError contract for existing callers.
        assert re.search(date_regex, date0_str), f"Invalid date0 entry: {date0_str}"
        # Dates are validated as YYYY-MM-DD, so fixed slicing is safe.
        self._date0 = cft_datetime_constr(int(date0_str[:4]),
                                          int(date0_str[5:7]),
                                          int(date0_str[8:10]))
        log.info(f"Climo date0: {self._date0}")

    if 'date1' in self._climo_config:
        date1_str = self._climo_config['date1']
        assert re.search(date_regex, date1_str), f"Invalid date1 entry: {date1_str}"
        self._date1 = cft_datetime_constr(int(date1_str[:4]),
                                          int(date1_str[5:7]),
                                          int(date1_str[8:10]))
        log.info(f"Climo date1: {self._date1}")
def main():
    """Compute and save time-mean mixing coefficients for a MOM6 case.

    Reads the yaml configuration, spins up dask workers, opens the model
    output with ``xr.open_mfdataset``, computes yearly then time means of
    the mixing-coefficient variables, and writes the result to
    ``ncfiles/<case>_avg_mixing_coeffs.nc``.
    """
    # Get options
    args = options()
    nw = args.number_of_workers
    if not os.path.isdir('ncfiles'):
        print('Creating a directory to place figures (ncfiles)... \n')
        os.system('mkdir ncfiles')

    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path,'r'), Loader=yaml.Loader)

    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'])
    args.case_name = dcase.casename
    RUNDIR = dcase.get_value('RUNDIR')
    print('Run directory is:', RUNDIR)
    print('Casename is:', dcase.casename)
    print('Number of workers to be used:', nw)

    # set avg dates; command-line values, when given, take precedence over yaml
    avg = diag_config_yml['Avg']
    if not args.start_date : args.start_date = avg['start_date']
    if not args.end_date : args.end_date = avg['end_date']

    # read grid info
    grd = MOM6grid(RUNDIR+'/'+dcase.casename+'.mom6.static.nc')
    depth = grd.depth_ocean
    # remove Nan's, otherwise genBasinMasks won't work
    depth[np.isnan(depth)] = 0.0
    basin_code = m6toolbox.genBasinMasks(grd.geolon, grd.geolat, depth)

    # spin up dask workers
    parallel, cluster, client = m6toolbox.request_workers(nw)

    print('Reading {} dataset...'.format(args.file_name))
    startTime = datetime.now()

    # load data
    def preprocess(ds):
        # Keep only the mixing-coefficient variables; fill any that are
        # missing from a file with zeros (shaped like vo) so concatenation
        # across files succeeds.
        variables = ['diftrblo', 'difmxylo' ,'difmxybo', 'diftrelo']
        for v in variables:
            if v not in ds.variables:
                ds[v] = xr.zeros_like(ds.vo)
        return ds[variables]

    ds = xr.open_mfdataset(RUNDIR+'/'+dcase.casename+args.file_name,
                           parallel=True,
                           combine="nested",   # concatenate in order of files
                           concat_dim="time",  # concatenate along time
                           preprocess=preprocess,
                           ).chunk({"time": 12})
    print('Time elasped: ', datetime.now() - startTime)

    # compute yearly means first
    print('Computing yearly means...')
    startTime = datetime.now()
    ds_yr = ds.resample(time="1Y", closed='left').mean('time')
    print('Time elasped: ', datetime.now() - startTime)

    print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))
    startTime = datetime.now()
    ds_sel = ds_yr.sel(time=slice(args.start_date, args.end_date))
    print('Time elasped: ', datetime.now() - startTime)

    print('Computing time mean...')
    startTime = datetime.now()
    ds_mean = ds_sel.mean('time').compute()
    print('Time elasped: ', datetime.now() - startTime)

    # global attributes recorded in the output netCDF file
    attrs = {'description': 'Time-mean mixing coefficients',
             'units': 'm^2/s',
             'start_date': avg['start_date'],
             'end_date': avg['end_date'],
             'casename': dcase.casename}
    m6toolbox.add_global_attrs(ds_mean,attrs)

    print('Saving netCDF files...')
    ds_mean.to_netcdf('ncfiles/'+str(args.case_name)+'_avg_mixing_coeffs.nc')

    print('Releasing workers ...')
    client.close(); cluster.close()

    return
def driver(args):
    """Equatorial upper-ocean T/S/u diagnostics against observations.

    Compares model temperature, salinity and eastward velocity in the
    equatorial band (10S-10N) against PHC2/Levitus climatology and
    Johnson et al. (2002) sections, saving figures under PNG/Equatorial
    and equatorial-section data under ncfiles/.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed command-line options; must provide number_of_workers,
        diag_config_yml_path, start_date and end_date (the latter two may
        be empty, in which case the yaml 'Avg' dates are used). The
        casename attribute is set here as a side effect.
    """
    nw = args.number_of_workers
    # Create output directories for figures and netCDF files if needed.
    if not os.path.isdir('PNG/Equatorial'):
        print('Creating a directory to place figures (PNG/Equatorial)... \n')
        os.system('mkdir -p PNG/Equatorial')
    if not os.path.isdir('ncfiles'):
        print('Creating a directory to place netCDF files (ncfiles)... \n')
        os.system('mkdir ncfiles')

    # Read in the yaml file
    diag_config_yml = yaml.load(open(args.diag_config_yml_path,'r'), Loader=yaml.Loader)

    # Create the case instance
    dcase = DiagsCase(diag_config_yml['Case'])
    RUNDIR = dcase.get_value('RUNDIR')
    args.casename = dcase.casename
    print('Run directory is:', RUNDIR)
    print('Casename is:', args.casename)
    print('Number of workers: ', nw)

    # set avg dates; command-line values, when given, take precedence over yaml
    avg = diag_config_yml['Avg']
    if not args.start_date : args.start_date = avg['start_date']
    if not args.end_date : args.end_date = avg['end_date']

    # read grid info
    grd = MOM6grid(RUNDIR+'/'+args.casename+'.mom6.static.nc', xrformat=True)

    # select Equatorial region
    grd_eq = grd.sel(yh=slice(-10,10))

    # load obs (glade paths — assumes NCAR filesystem; TODO confirm portability)
    phc_path = '/glade/p/cesm/omwg/obs_data/phc/'
    phc_temp = xr.open_mfdataset(phc_path+'PHC2_TEMP_tx0.66v1_34lev_ann_avg.nc',
                                 decode_coords=False, decode_times=False)
    phc_salt = xr.open_mfdataset(phc_path+'PHC2_SALT_tx0.66v1_34lev_ann_avg.nc',
                                 decode_coords=False, decode_times=False)
    johnson = xr.open_dataset('/glade/p/cesm/omwg/obs_data/johnson_pmel/meanfit_m.nc')

    # get T and S and rename variables to match the model dimension names
    thetao_obs = phc_temp.TEMP.rename({'X': 'xh','Y': 'yh', 'depth': 'z_l'});
    salt_obs = phc_salt.SALT.rename({'X': 'xh','Y': 'yh', 'depth': 'z_l'});

    # spin up dask workers
    parallel, cluster, client = request_workers(nw)

    print('Reading surface dataset...')
    startTime = datetime.now()

    #variables = ['thetao', 'so', 'uo', 'time', 'time_bnds', 'e']
    #def preprocess(ds):
    #  ''' Compute yearly averages and return the dataset with variables'''
    #  return ds[variables].resample(time="1Y", closed='left', \
    #         keep_attrs=True).mean(dim='time', keep_attrs=True)

    # load data
    def preprocess(ds):
        # restrict each file to the variables needed by this diagnostic
        variables = ['thetao', 'so', 'uo', 'time', 'time_bnds', 'e']
        return ds[variables]

    # NOTE: the parallel and serial paths read different file patterns
    # (.mom6.h_*.nc vs .mom6.monthly_*.nc) — confirm this is intentional.
    if parallel:
        ds = xr.open_mfdataset(RUNDIR+'/'+dcase.casename+'.mom6.h_*.nc', \
             parallel=True, data_vars='minimal', \
             coords='minimal', compat='override', preprocess=preprocess)
    else:
        ds = xr.open_mfdataset(RUNDIR+'/'+dcase.casename+'.mom6.monthly_*.nc', concat_dim=['time'],\
             data_vars='minimal', coords='minimal', compat='override', preprocess=preprocess)
    print('Time elasped: ', datetime.now() - startTime)

    # set obs coords to be same as model
    thetao_obs['xh'] = ds.xh; thetao_obs['yh'] = ds.yh;
    salt_obs['xh'] = ds.xh; salt_obs['yh'] = ds.yh;

    print('Selecting data between {} and {} (time) and -10 to 10 (yh)...'.format(args.start_date, \
          args.end_date))
    startTime = datetime.now()
    # restrict to the averaging window, the equatorial band, and upper levels
    ds = ds.sel(time=slice(args.start_date, args.end_date)).sel(yh=slice(-10,10)).isel(z_i=slice(0,15)).isel(z_l=slice(0,14))
    print('Time elasped: ', datetime.now() - startTime)

    print('Yearly mean...')
    startTime = datetime.now()
    ds = ds.resample(time="1Y", closed='left',keep_attrs=True).mean('time',keep_attrs=True).compute()
    print('Time elasped: ', datetime.now() - startTime)

    print('Time averaging...')
    startTime = datetime.now()
    thetao = ds.thetao.mean('time')
    so = ds.so.mean('time')
    uo = ds.uo.mean('time')
    eta = ds.e.mean('time')

    # find point closest to eq. and select data
    j = np.abs( grd_eq.geolat[:,0].values - 0. ).argmin()
    temp_eq = np.ma.masked_invalid(thetao.isel(yh=j).values)
    salt_eq = np.ma.masked_invalid(so.isel(yh=j).values)
    u_eq = np.ma.masked_invalid(uo.isel(yh=j).values)
    e_eq = np.ma.masked_invalid(eta.isel(yh=j).values)
    thetao_obs_eq = np.ma.masked_invalid(thetao_obs.sel(yh=slice(-10,10)).isel(yh=j).isel(z_l=slice(0,14)).values)
    salt_obs_eq = np.ma.masked_invalid(salt_obs.sel(yh=slice(-10,10)).isel(yh=j).isel(z_l=slice(0,14)).values)
    print('Time elasped: ', datetime.now() - startTime)

    if parallel:
        print('\n Releasing workers...')
        client.close(); cluster.close()

    print('Equatorial Upper Ocean plots...')
    y = ds.yh.values
    zz = ds.z_i.values
    x = ds.xh.values
    [X, Z] = np.meshgrid(x, zz)
    # layer midpoints from interface depths
    z = 0.5 * ( Z[:-1] + Z[1:])
    figname = 'PNG/Equatorial/'+str(dcase.casename)+'_'

    # model vs. obs sections along the equator (longitude-depth)
    yzcompare(temp_eq , thetao_obs_eq, x, -Z,
              title1 = 'model temperature', ylabel='Longitude', yunits='',
              title2 = 'observed temperature (PHC/Levitus)',
              #contour=True,
              suptitle=dcase.casename + ', averaged '+str(args.start_date)+ ' to ' +str(args.end_date),
              extend='neither', dextend='neither', clim=(6,31.), dlim=(-5,5),
              dcolormap=plt.cm.bwr, save=figname+'Equatorial_Global_temperature.png')

    yzcompare(salt_eq , salt_obs_eq, x, -Z,
              title1 = 'model salinity', ylabel='Longitude', yunits='',
              title2 = 'observed salinity (PHC/Levitus)',
              #contour=True,
              suptitle=dcase.casename + ', averaged '+str(args.start_date)+ ' to ' +str(args.end_date),
              extend='neither', dextend='neither', clim=(33.5,37.), dlim=(-1,1),
              dcolormap=plt.cm.bwr, save=figname+'Equatorial_Global_salinity.png')

    # create dataarays and saving data
    temp_eq_da = xr.DataArray(temp_eq, dims=['zl','xh'],
                              coords={'zl' : z[:,0], 'xh' : x[:]}).rename('temp_eq')
    temp_eq_da.to_netcdf('ncfiles/'+str(args.casename)+'_temp_eq.nc')
    salt_eq_da = xr.DataArray(salt_eq, dims=['zl','xh'],
                              coords={'zl' : z[:,0], 'xh' : x[:]}).rename('salt_eq')
    salt_eq_da.to_netcdf('ncfiles/'+str(args.casename)+'_salt_eq.nc')

    # Shift model data to compare against obs (in-place longitude update)
    tmp, lonh = shiftgrid(thetao.xh[-1].values, thetao[0,0,:].values, ds.thetao.xh.values)
    tmp, lonq = shiftgrid(uo.xq[-1].values, uo[0,0,:].values, uo.xq.values)
    thetao['xh'].values[:] = lonh
    so['xh'].values[:] = lonh
    uo['xq'].values[:] = lonq

    # y and z from obs
    y_obs = johnson.YLAT11_101.values
    zz = np.arange(0,510,10)
    [Y, Z_obs] = np.meshgrid(y_obs, zz)
    z_obs = 0.5 * ( Z_obs[0:-1,:] + Z_obs[1:,] )
    # y and z from model
    y_model = thetao.yh.values
    z = eta.z_i.values
    [Y, Z_model] = np.meshgrid(y_model, z)
    z_model = 0.5 * ( Z_model[0:-1,:] + Z_model[1:,:] )

    # longitudes to be compared (degrees East, Pacific sections)
    longitudes = [143., 156., 165., 180., 190., 205., 220., 235., 250., 265.]
    for l in longitudes:
        # Temperature: latitude-depth section at this longitude
        fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(16,8))
        dummy_model = np.ma.masked_invalid(thetao.sel(xh=l, method='nearest').values)
        dummy_obs = np.ma.masked_invalid(johnson.POTEMPM.sel(XLON=l, method='nearest').values)
        yzplot(dummy_model, y_model, -Z_model, clim=(7,30), axis=ax1,
               zlabel='Depth', ylabel='Latitude', title=str(dcase.casename))
        cs1 = ax1.contour( y_model + 0*z_model, -z_model, dummy_model,
                           levels=np.arange(0,30,2), colors='k',); plt.clabel(cs1,fmt='%3.1f', fontsize=14)
        ax1.set_ylim(-400,0)
        yzplot(dummy_obs, y_obs, -Z_obs, clim=(7,30), axis=ax2,
               zlabel='Depth', ylabel='Latitude', title='Johnson et al (2002)')
        cs2 = ax2.contour( y_obs + 0*z_obs, -z_obs, dummy_obs,
                           levels=np.arange(0,30,2), colors='k',); plt.clabel(cs2,fmt='%3.1f', fontsize=14)
        ax2.set_ylim(-400,0)
        plt.suptitle('Temperature [C] @ '+str(l)+ ', averaged between '+str(args.start_date)+' and '+str(args.end_date))
        plt.savefig(figname+'temperature_'+str(l)+'.png')

        # Salt: latitude-depth section at this longitude
        fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(16,8))
        dummy_model = np.ma.masked_invalid(so.sel(xh=l, method='nearest').values)
        dummy_obs = np.ma.masked_invalid(johnson.SALINITYM.sel(XLON=l, method='nearest').values)
        yzplot(dummy_model, y_model, -Z_model, clim=(32,36), axis=ax1,
               zlabel='Depth', ylabel='Latitude', title=str(dcase.casename))
        cs1 = ax1.contour( y_model + 0*z_model, -z_model, dummy_model,
                           levels=np.arange(32,36,0.5), colors='k',); plt.clabel(cs1,fmt='%3.1f', fontsize=14)
        ax1.set_ylim(-400,0)
        yzplot(dummy_obs, y_obs, -Z_obs, clim=(32,36), axis=ax2,
               zlabel='Depth', ylabel='Latitude', title='Johnson et al (2002)')
        cs2 = ax2.contour( y_obs + 0*z_obs, -z_obs, dummy_obs,
                           levels=np.arange(32,36,0.5), colors='k',); plt.clabel(cs2,fmt='%3.1f', fontsize=14)
        ax2.set_ylim(-400,0)
        plt.suptitle('Salinity [psu] @ '+str(l)+ ', averaged between '+str(args.start_date)+' and '+str(args.end_date))
        plt.savefig(figname+'salinity_'+str(l)+'.png')

        # uo: eastward velocity section at this longitude (on xq points)
        fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(16,8))
        dummy_model = np.ma.masked_invalid(uo.sel(xq=l, method='nearest').values)
        dummy_obs = np.ma.masked_invalid(johnson.UM.sel(XLON=l, method='nearest').values)
        yzplot(dummy_model, y_model, -Z_model, clim=(-0.6,1.2), axis=ax1,
               zlabel='Depth', ylabel='Latitude', title=str(dcase.casename))
        cs1 = ax1.contour( y_model + 0*z_model, -z_model, dummy_model,
                           levels=np.arange(-1.2,1.2,0.1), colors='k',); plt.clabel(cs1,fmt='%3.1f', fontsize=14)
        ax1.set_ylim(-400,0)
        yzplot(dummy_obs, y_obs, -Z_obs, clim=(-0.6,1.2), axis=ax2,
               zlabel='Depth', ylabel='Latitude', title='Johnson et al (2002)')
        cs2 = ax2.contour( y_obs + 0*z_obs, -z_obs, dummy_obs,
                           levels=np.arange(-1.2,1.2,0.1), colors='k',); plt.clabel(cs2,fmt='%3.1f', fontsize=14)
        ax2.set_ylim(-400,0)
        plt.suptitle('Eastward velocity [m/s] @ '+str(l)+ ', averaged between '+str(args.start_date)+' and '+str(args.end_date))
        plt.savefig(figname+'uo_'+str(l)+'.png')

    # Eastward velocity [m/s] along the Equatorial Pacific
    x_obs = johnson.XLON.values
    [X_obs, Z_obs] = np.meshgrid(x_obs, zz)
    z_obs = 0.5 * ( Z_obs[:-1,:] + Z_obs[1:,:] )
    x_model = so.xh.values
    z = eta.z_i.values
    [X, Z_model] = np.meshgrid(x_model, z)
    z_model = 0.5 * ( Z_model[:-1,:] + Z_model[1:,:] )

    #from mom6_tools.m6plot import
    fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(16,8))
    dummy_obs = np.ma.masked_invalid(johnson.UM.sel(YLAT11_101=0).values)
    dummy_model = np.ma.masked_invalid(uo.sel(yh=0, method='nearest').values)
    yzplot(dummy_model, x_model, -Z_model, clim=(-0.6,1.2), axis=ax1,
           landcolor=[0., 0., 0.], title=str(dcase.casename), ylabel='Longitude')
    cs1 = ax1.contour( x_model + 0*z_model, -z_model, dummy_model,
                       levels=np.arange(-1.2,1.2,0.1), colors='k'); plt.clabel(cs1,fmt='%2.1f', fontsize=14)
    ax1.set_xlim(143,265); ax1.set_ylim(-400,0)
    yzplot(dummy_obs, x_obs, -Z_obs, clim=(-0.4,1.2), ylabel='Longitude', yunits='',
           axis=ax2, title='Johnson et al (2002)')
    cs1 = ax2.contour( x_obs + 0*z_obs, -z_obs, dummy_obs,
                       levels=np.arange(-1.2,1.2,0.1), colors='k'); plt.clabel(cs1,fmt='%2.1f', fontsize=14)
    ax2.set_xlim(143,265); ax2.set_ylim(-400,0)
    plt.suptitle('Eastward velocity [m/s] along the Equatorial Pacific, averaged between '+str(args.start_date)+' and '+str(args.end_date))
    plt.savefig(figname+'Equatorial_Pacific_uo.png')
    plt.close('all')

    return