def get_geosfp_hourly_A1(root_dir, date, varnames,
                         filename='GEOSFP.YYYYMMDD.A1.2x25.nc',
                         verbose=True):
    """ GEOSFP.YYYYMMDD.A1.*.nc times are 00:30, 01:30, ......, 23:30.
    The function reads the data and interpolates them to
    01:00, 02:00, ......, 24:00 by averaging consecutive half-hourly
    records. (ywang, 03/24/20)

    Parameters
    ----------
    root_dir : str
        Data root directory
    date : str
        YYYYMMDD
    varnames : list
        Variables to be processed
    filename : str
        File name template; for example, 'GEOSFP.20180630.A1.2x25.nc'
    verbose : bool
        Output more information.

    Returns
    -------
    out_dict : dict
        varname => array of 24 hourly fields valid at 01:00 ... 24:00.
    """

    # 24 hours a day
    N = 24

    if verbose:
        print(' - get_geosfp_hourly_A1: process ' + date)

    # Date (the next day's file supplies the 00:30 record needed to
    # interpolate the current day's 24:00 value)
    currDate_D = datetime.datetime.strptime(date, '%Y%m%d')
    nextDate_D = currDate_D + datetime.timedelta(days=1)

    # directory
    if root_dir[-1] != '/':
        root_dir = root_dir + '/'

    # files
    curr_file = get_filename(root_dir, str(currDate_D), filename)
    next_file = get_filename(root_dir, str(nextDate_D), filename)

    # read and process data
    # BUGFIX: honor the *verbose* argument instead of hard-coding True
    curr_dict = read_nc(curr_file, varnames, verbose=verbose)
    next_dict = read_nc(next_file, varnames, verbose=verbose)

    out_dict = {}
    for varn in varnames:
        # stack 48 half-hourly records (24 current day + 24 next day),
        # then average each pair of neighbors:
        # value(h:00) = 0.5 * (value(h - 0:30) + value(h + 0:30))
        tmp = np.vstack((curr_dict[varn], next_dict[varn]))
        out_dict[varn] = (tmp[0:N] + tmp[1:N + 1]) * 0.5

    return out_dict
def read_tropomi_no2(filename, varnames=None, replace=False,
                     verbose=False, squeeze=True):
    """ Read TROPOMI NO2 product.

    Parameters
    ----------
    filename : str
        S5P_RPRO_L2__NO2*.nc file.
    varnames : list or None
        A list of additional variable names (None means no extras).
    replace : logical (default False)
        If True, read only *varnames*; otherwise append *varnames*
        to the built-in default variable list.
    verbose : logical (default False)
        Whether or not output more information.
    squeeze : logical (default True)
        Passed through to read_nc.

    Returns
    -------
    out_data : dict
        A dictionary of variables.
    """

    if verbose:
        print(' - read_tropomi_no2: reading ' + filename)

    # BUGFIX: avoid a mutable default argument; an empty list default
    # is shared across calls in Python.
    if varnames is None:
        varnames = []

    # all variables names
    # BUGFIX: the cloud-radiance-fraction path was written with a
    # backslash line-continuation *inside* the string literal, which
    # silently embeds any leading indentation into the variable path;
    # keep the full path in a single literal instead.
    all_varnames = [
        'PRODUCT/latitude',
        'PRODUCT/longitude',
        'PRODUCT/nitrogendioxide_tropospheric_column',
        'PRODUCT/qa_value',
        'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/'
        'cloud_radiance_fraction_nitrogendioxide_window',
        'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/solar_zenith_angle',
        'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/viewing_zenith_angle',
    ]

    if replace:
        all_varnames = varnames
    else:
        all_varnames.extend(varnames)

    # remove duplicated variables
    # (note: set() does not preserve the original order)
    all_varnames = list(set(all_varnames))

    # read data
    out_data = read_nc(filename, all_varnames, squeeze=squeeze)

    return out_data
gf_para.region_dict['region4'], gf_para.region_dict['region5'], ] no2_rad_sub_region_color = ('black', 'red') # # End user parameters ##################### no2_title = '_'.join(no2_name.split('_')[-2:]) rad_title = rad_name # read TROPOMI data no2_filename = no2_data_dir + no2_name + '.nc' no2_data = read_nc(no2_filename, ['lat_e', 'lon_e', 'column_amount']) no2_lat_e = no2_data['lat_e'] no2_lon_e = no2_data['lon_e'] no2_lon_e, no2_lat_e = np.meshgrid(no2_lon_e, no2_lat_e) no2_column_amount = no2_data['column_amount'] * mol_m2_to_molec_cm2 # read VIIRS nighttime light rad_filename = rad_data_dir + rad_name + '.nc' rad_data = read_nc(rad_filename, ['lat_c', 'lon_c', 'radiance', 'count']) rad_value = rad_data['radiance'] # plot pout = plot_two_variables(no2_lat_e, no2_lon_e, no2_column_amount, no2_lat_e,
def plot_pearsonr_map(filename, fig_dir, varname, name='', p_thre=0.05,
                      r_vmin=None, r_vmax=None,
                      r_cmap=plt.get_cmap('seismic'),
                      r_signi_cmap=plt.get_cmap('seismic'),
                      r_signi_vmin=None, r_signi_vmax=None,
                      r_signi_min_valid=None,
                      p_vmin=None, p_vmax=None, p_cmap=None,
                      signi_ocean_color=None,
                      countries=True, states=True,
                      cl_res='110m', cl_color=None, lw=None,
                      equator=False, NH_label='', SH_label=''):
    """ Plot Pearson correlation coefficient. (ywang, 05/27/20)

    Three figures are written to *fig_dir*:
      <name>_r_<varname>.png           : correlation coefficient map
      <name>_p_<varname>.png           : p-value map
      <name>_r_p<p_thre>_<varname>.png : r only where p < p_thre

    Parameters
    ----------
    filename : str
        netCDF file of trend_analysis results.
    fig_dir : str
        Directory to save figures.
    varname : str
        Variable name; 'r_'+varname and 'p_'+varname are read.
    name : str
        Prefix of figure names.
    p_thre : float
        p-value threshold for the significance map.
    r_signi_min_valid : float or None
        If not None, significant r values below this are set to NaN.
    signi_ocean_color : None or str
        If not None, the ocean is painted with this color on the
        significant-r map and coastlines are re-drawn on top.
    equator : bool
        If True, draw the equator line and NH_label/SH_label text.
    """

    # directory
    if fig_dir[-1] != '/':
        fig_dir = fig_dir + '/'

    # variables to be read
    varnames = ['Latitude_e', 'Longitude_e', \
            'r_'+varname, 'p_'+varname]

    # read data
    data = read_nc(filename, varnames, verbose=True)

    # get latitude and longitude edges
    lat_e = data['Latitude_e']
    lon_e = data['Longitude_e']

    # get correlation coefficients and p-value
    r_val = data['r_' + varname]
    p_val = data['p_' + varname]

    cbar_prop = {}
    cbar_prop['orientation'] = 'horizontal'

    # equator line style
    eqr_c = 'white'
    eqr_ls = '--'
    eqr_ls_lw = 2

    # plot correlation coefficients
    r_plot = cartopy_plot(lon_e, lat_e, r_val, cbar_prop=cbar_prop,
                          countries=countries, states=states,
                          cmap=r_cmap, vmin=r_vmin, vmax=r_vmax)
    r_plot['cb'].set_label('Linear correlation coefficient')
    if equator:
        r_plot['ax'].plot([-180, 180], [0, 0], color=eqr_c,
                          linestyle=eqr_ls, lw=eqr_ls_lw,
                          transform=ccrs.PlateCarree())
        r_plot['ax'].text(-175, 5, NH_label, color=eqr_c,
                          ha='left', va='bottom',
                          transform=ccrs.Geodetic())
        r_plot['ax'].text(-175, -5, SH_label, color=eqr_c,
                          ha='left', va='top',
                          transform=ccrs.Geodetic())

    # save correlation coefficients plot
    fig_r = fig_dir + name + '_r_' + varname + '.png'
    plt.savefig(fig_r, format='png', dpi=300)

    # plot p value
    p_plot = cartopy_plot(lon_e, lat_e, p_val, cbar_prop=cbar_prop,
                          countries=countries, states=states,
                          cmap=p_cmap, vmin=p_vmin, vmax=p_vmax)
    p_plot['cb'].set_label('p-value')
    if equator:
        p_plot['ax'].plot([-180, 180], [0, 0], color=eqr_c,
                          linestyle=eqr_ls, lw=eqr_ls_lw,
                          transform=ccrs.PlateCarree())
        p_plot['ax'].text(-175, 5, NH_label, color=eqr_c,
                          ha='left', va='bottom',
                          transform=ccrs.Geodetic())
        p_plot['ax'].text(-175, -5, SH_label, color=eqr_c,
                          ha='left', va='top',
                          transform=ccrs.Geodetic())

    # save p-value
    fig_p = fig_dir + name + '_p_' + varname + '.png'
    plt.savefig(fig_p, format='png', dpi=300)

    # plot correlation coefficients with p-value less than p_thre
    r_val_signi = deepcopy(r_val)
    flag = (p_val < p_thre)
    # mask out non-significant cells
    r_val_signi[np.logical_not(flag)] = np.nan
    if r_signi_min_valid is not None:
        r_val_signi[r_val_signi < r_signi_min_valid] = np.nan
    r_signi_plot = cartopy_plot(lon_e, lat_e, r_val_signi,
                                cbar_prop=cbar_prop,
                                countries=countries, states=states,
                                cmap=r_signi_cmap,
                                vmin=r_signi_vmin, vmax=r_signi_vmax)
    r_signi_plot['cb'].set_label('Linear correlation coefficient' + \
            ' (p<{:})'.format(p_thre))
    if equator:
        # zorder=200 keeps the equator annotations above the ocean
        # mask (drawn at zorder=100 below)
        r_signi_plot['ax'].plot([-180, 180], [0, 0], color=eqr_c,
                                linestyle=eqr_ls, lw=eqr_ls_lw,
                                transform=ccrs.PlateCarree(),
                                zorder=200)
        r_signi_plot['ax'].text(-175, 5, NH_label, color=eqr_c,
                                ha='left', va='bottom',
                                transform=ccrs.Geodetic(), zorder=200)
        r_signi_plot['ax'].text(-175, -5, SH_label, color=eqr_c,
                                ha='left', va='top',
                                transform=ccrs.Geodetic(), zorder=200)
    if signi_ocean_color is not None:
        # paint the ocean over the data (zorder=100) ...
        r_signi_plot['ax'].add_feature(cfeature.OCEAN,
                                       color=signi_ocean_color,
                                       zorder=100)
        # ... then re-draw coastlines on top of it (zorder=300).
        # NOTE(review): assumed to belong inside this if-block, since
        # re-drawing only makes sense after the ocean feature hides
        # the original coastlines -- confirm against source history.
        r_signi_plot['ax'].coastlines(resolution=cl_res,
                                      color=cl_color, lw=lw,
                                      zorder=300)

    # save correlation coefficients plot
    fig_r_signi = fig_dir + name + '_r_p' + str(p_thre) + '_' + \
            varname + '.png'
    plt.savefig(fig_r_signi, format='png', dpi=300)
# Loop over all days and read each daily model/satellite file.
# NOTE(review): currDate_D, endDate_D, daily_dir, out_dict,
# in_data_list, in_count_list, varname_list, coord_name_list and
# num_name are defined earlier in the script, outside this excerpt.
latlon_flag = True
while currDate_D <= endDate_D:

    # current date as 'YYYY-MM-DD'
    currDate = str(currDate_D)[0:10]
    # separator line of 79 dashes
    print(''.join(np.full((79, ), '-')))
    print('processing ' + currDate)

    # read data
    in_filename = daily_dir + 'model_satellite_' + \
            currDate + '.nc'
    if os.path.exists(in_filename):
        if latlon_flag:
            # first existing file: also read coordinate variables once
            latlon_flag = False
            in_data = read_nc(in_filename,
                              varname_list + coord_name_list,
                              verbose=True)
            for varname in coord_name_list:
                # move the coordinates out of the daily record and
                # into the output dictionary
                out_dict[varname] = in_data.pop(varname)
        else:
            in_data = read_nc(in_filename, varname_list,
                              verbose=True)
        in_data_list.append(in_data)
        in_count_list.append(in_data[num_name])

    # go to next day
    currDate_D = currDate_D + datetime.timedelta(days=1)

# number of days contributing to each grid cell
in_count_sum = np.array(in_count_list, dtype=int)
# NOTE(review): '>= 0' marks every cell of every day as True, so this
# effectively counts the number of files read at each cell regardless
# of the stored value; confirm whether '> 0' (cells with at least one
# observation) was intended.
in_count_sum = (in_count_sum >= 0)
in_count_sum = np.sum(in_count_sum, axis=0)
out_dict['count'] = in_count_sum
def plot_trend_map(
        filename, fig_dir,
        mean_flag=True, trend_flag=True, sigma_flag=True,
        name='',
        mean_vmin=None, mean_vmax=None, mean_cmap=None, mean_units='',
        trend_vmin=None, trend_vmax=None,
        trend_cmap=plt.get_cmap('seismic'), trend_units='',
        sigma_units='',
        region_limit=None,
        xtick=np.arange(-180, 180.0, 60),
        ytick=np.arange(-90, 90.1, 30),
        countries=True, states=True,
        mean_mask=None,
        ):
    """ Plot results from trend_analysis. (ywang, 05/21/20)

    Saves up to four maps in *fig_dir*: mean, trend, trend standard
    deviation, and the trend where |trend| / sigma >= 2.

    Parameters
    ----------
    filename : str
        netCDF file of trend_analysis results.
    fig_dir : str
        Directory to save figures.
    mean_flag : bool
        Plot mean or not
    trend_flag : bool
        Plot trend or not
    sigma_flag : bool
        Plot trend standard deviation (and significant trend) or not
    name : str
        Prefix of figure names.
    mean_units, trend_units, sigma_units : str
        Colorbar labels.
    region_limit : tuple or None
        (lat_min, lon_min, lat_max, lon_max) axis limits.
    mean_mask : array or None
        Mask applied to the mean field (truthy entries are hidden).
    """

    # directory
    if fig_dir[-1] != '/':
        fig_dir = fig_dir + '/'

    # variables to be read
    varnames = ['Latitude_e', 'Longitude_e']
    if mean_flag:
        varnames.append('mean')
    # BUGFIX: the significant-trend plot in the sigma branch needs
    # 'trend' as well; previously it was only read when trend_flag was
    # True, so sigma_flag=True with trend_flag=False raised a KeyError
    # at data['trend'].
    if trend_flag or sigma_flag:
        varnames.append('trend')
    if sigma_flag:
        varnames.append('trend_sigma')

    # read data
    data = read_nc(filename, varnames, verbose=True)

    # get latitude and longitude edges
    lat_e = data['Latitude_e']
    lon_e = data['Longitude_e']

    cbar_prop = {}
    cbar_prop['orientation'] = 'horizontal'

    # plot mean
    if mean_flag:
        mean = data['mean']
        if mean_mask is None:
            # BUGFIX: build a boolean all-False mask; np.full_like
            # produced a float array of 0.0 (same masking effect, but
            # the wrong dtype for a mask).
            mean_mask = np.full(mean.shape, False)
        mean = np.ma.masked_array(mean, mean_mask)
        mean_p = cartopy_plot(lon_e, lat_e, mean, cbar_prop=cbar_prop,
                              countries=countries, states=states,
                              cmap=mean_cmap, xtick=xtick, ytick=ytick,
                              vmin=mean_vmin, vmax=mean_vmax)
        mean_p['cb'].set_label(mean_units)
        if region_limit is not None:
            mean_p['ax'].set_xlim((region_limit[1], region_limit[3]))
            mean_p['ax'].set_ylim((region_limit[0], region_limit[2]))
        fig_mean = fig_dir + name + '_mean.png'
        plt.savefig(fig_mean, format='png', dpi=300)

    # plot trend
    if trend_flag:
        trend = data['trend']
        trend_p = cartopy_plot(lon_e, lat_e, trend,
                               cbar_prop=cbar_prop,
                               vmin=trend_vmin, vmax=trend_vmax,
                               countries=countries, states=states,
                               xtick=xtick, ytick=ytick,
                               cmap=trend_cmap)
        trend_p['cb'].set_label(trend_units)
        if region_limit is not None:
            trend_p['ax'].set_xlim((region_limit[1], region_limit[3]))
            trend_p['ax'].set_ylim((region_limit[0], region_limit[2]))
        fig_trend = fig_dir + name + '_trend.png'
        plt.savefig(fig_trend, format='png', dpi=300)

    # plot trend_sigma
    if sigma_flag:
        sigma = data['trend_sigma']
        if not trend_flag:
            # trend wasn't fetched above; needed for the significance
            # map below (and now guaranteed to be in *data*)
            trend = data['trend']

        # plot sigma
        sigma_p = cartopy_plot(lon_e, lat_e, sigma,
                               cbar_prop=cbar_prop,
                               xtick=xtick, ytick=ytick,
                               countries=countries, states=states)
        sigma_p['cb'].set_label(sigma_units)
        if region_limit is not None:
            sigma_p['ax'].set_xlim((region_limit[1], region_limit[3]))
            sigma_p['ax'].set_ylim((region_limit[0], region_limit[2]))

        # save sigma plot
        fig_sigma = fig_dir + name + '_sigma.png'
        plt.savefig(fig_sigma, format='png', dpi=300)

        # trends that are significant (|trend| at least 2 sigma)
        trend_signi_flag = (np.absolute(trend) / sigma) >= 2.0
        trend_signi = \
            np.ma.masked_array(trend,
                               np.logical_not(trend_signi_flag))

        # plot trends that are significant
        trend_signi_p = cartopy_plot(lon_e, lat_e, trend_signi,
                                     cbar_prop=cbar_prop,
                                     countries=countries,
                                     states=states,
                                     xtick=xtick, ytick=ytick,
                                     vmin=trend_vmin, vmax=trend_vmax,
                                     cmap=trend_cmap)
        trend_signi_p['cb'].set_label(trend_units)
        if region_limit is not None:
            trend_signi_p['ax'].set_xlim((region_limit[1],
                                          region_limit[3]))
            trend_signi_p['ax'].set_ylim((region_limit[0],
                                          region_limit[2]))

        # save significant-trend plot
        fig_trend_signi = fig_dir + name + '_trend_signi.png'
        plt.savefig(fig_trend_signi, format='png', dpi=300)
def get_geosfp_hourly_A1_3days_direct(root_dir, date, varnames,
                                      filename='GEOSFP.YYYYMMDD.A1.2x25.nc',
                                      verbose=True):
    """ Read GEOSFP.YYYYMMDD.A1.*.nc directly for the previous,
    current, and next day; record times are 00:30, 01:30, ......,
    23:30. (ywang, 03/26/20)

    Parameters
    ----------
    root_dir : str
        Data root directory (files are expected under
        root_dir/YYYY/MM/).
    date : str
        YYYYMMDD
    varnames : list
        Variables to be processed
    filename : str
        File name template; for example, 'GEOSFP.20180630.A1.2x25.nc'
    verbose : bool
        Output more information.

    Returns
    -------
    out_dict : dict
        varname => records of all existing days stacked along axis 0.
    """

    if verbose:
        print(' - get_geosfp_hourly_A1_3days_direct: ' + date)

    # directory
    if root_dir[-1] != '/':
        root_dir = root_dir + '/'

    # previous, current, and next dates as YYYYMMDD strings
    currDate_D = datetime.datetime.strptime(date, '%Y%m%d')
    preDate_D = currDate_D + datetime.timedelta(days=-1)
    nextDate_D = currDate_D + datetime.timedelta(days=1)
    p_date = preDate_D.strftime('%Y%m%d')
    c_date = date
    n_date = nextDate_D.strftime('%Y%m%d')

    # all dates in a list
    date_list = [p_date, c_date, n_date]

    # get all data
    out_dict = {varn: [] for varn in varnames}
    for one_date in date_list:

        # BUGFIX: build the YYYY/MM subdirectory from the date of the
        # file itself. The old code always used the *current* date's
        # year/month, so the previous/next day's file was looked up in
        # the wrong directory when it fell in a different month.
        # (Assumes data are laid out as root_dir/YYYY/MM/ -- the same
        # layout the original code used for the current date.)
        fn = root_dir + one_date[0:4] + '/' + one_date[4:6] + '/' + \
            filename.replace('YYYYMMDD', one_date)

        # Determine if file exists
        if not os.path.exists(fn):
            # BUGFIX: report the resolved file path, not the
            # unexpanded 'YYYYMMDD' template
            print(' - get_geosfp_hourly_A1_3days_direct: WARNING! ' +
                  fn + ' does not exist.')
            continue

        # BUGFIX: honor the *verbose* argument
        tmp_dict = read_nc(fn, varnames, verbose=verbose)
        for varn in varnames:
            out_dict[varn].append(tmp_dict[varn])

    # stack each variable's daily records along the time axis
    for varn in varnames:
        out_dict[varn] = np.vstack(out_dict[varn])

    return out_dict
def read_inst_resample(file_list, varname_list, latlon_flag=True,
                       time=True):
    """ Read data from GC instant output files for resampling
    according to satellite overpass time.

    Parameters
    ----------
    file_list : list
        A list of instant output files. It usually includes files of
        previous one day, current day, and next one day.
    varname_list : list
        A list of variable names.
    latlon_flag : bool
        Whether or not get latitude and longitude (centers and edges).
    time : bool
        Whether or not get time (as datetimes and TAI93 seconds).

    Returns
    -------
    out_dict : dict
        varname => concatenated array (4-D variables are reordered so
        'lev' becomes the last axis); plus, depending on the flags,
        'latitude', 'longitude', 'latitude_e', 'longitude_e',
        'date_time', and 'TAI93'.
    """

    out_dict = {}

    # one accumulator list per 3-D or 4-D variable
    for varname in varname_list:
        out_dict[varname] = []

    if time:
        date_time = []

    # read data file by file
    for i in range(len(file_list)):

        filename = file_list[i]

        # Determine if file exists; missing files are skipped with a
        # warning (best-effort read)
        if not os.path.exists(filename):
            print(' - read_inst_resample: WARNING! ' + filename + \
                    ' does not exist.')
            continue

        # get variables
        data_1 = read_nc(filename, varname_list, verbose=True)
        for varname in varname_list:
            out_dict[varname].append(data_1[varname])

        # get time: decode the netCDF 'time' variable from its units
        # attribute, expected like 'minutes since YYYY-MM-DD HH:MM:SS'
        # (words [2] and [3] are the date and clock strings)
        if time:
            fid = Dataset(filename, 'r')
            time_var = fid.variables['time']
            time_start = getattr(time_var, 'units')
            time_start = time_start.split()
            dt_string = time_start[2] + ' ' + time_start[3]
            time_start = datetime.datetime.strptime(dt_string,
                    '%Y-%m-%d %H:%M:%S')
            time_delta = time_var[:]
            # NOTE(review): this inner loop reuses the outer loop
            # index *i*; harmless here because the outer for-loop
            # reassigns i from its own iterator, but fragile.
            for i in range(len(time_delta)):
                date_time.append(time_start + \
                        datetime.timedelta(minutes=time_delta[i]))
            fid.close()

        # get latitude and longitude (only from the first file read)
        if latlon_flag:

            latlon_flag = False

            data_3 = read_nc(filename, ['lat', 'lon'], verbose=True)

            # centers
            out_dict['latitude'] = data_3['lat']
            out_dict['longitude'] = data_3['lon']

            # latitudue edges: interior edges are midpoints of
            # adjacent centers
            out_dict['latitude_e'] = \
                    ( data_3['lat'][:-1] + data_3['lat'][1:] ) * 0.5
            lat_del_1 = out_dict['latitude_e'][1] - \
                    out_dict['latitude_e'][0]
            lat_del_2 = out_dict['latitude_e'][2] - \
                    out_dict['latitude_e'][1]
            # NOTE(review): compares the first two edge spacings to
            # decide whether the grid has uniform cells (if-branch) or
            # half-size polar cells (else-branch). The test is not
            # abs(); it assumes lat_del_1 <= lat_del_2 + 1e-6 holds
            # for uniform grids -- confirm for the grids in use.
            if ((lat_del_2 - lat_del_1) < 1e-6):
                # uniform grid: extend one full spacing at both ends
                out_dict['latitude_e'] = np.insert(
                        out_dict['latitude_e'], 0,
                        out_dict['latitude_e'][0] - lat_del_2)
                out_dict['latitude_e'] = np.insert(
                        out_dict['latitude_e'],
                        len(out_dict['latitude_e']),
                        out_dict['latitude_e'][-1] + lat_del_2)
            else:
                # half-polar grid: rebuild the outermost interior
                # edges with the regular spacing, then add the polar
                # edges half a spacing out
                out_dict['latitude_e'][0] = \
                        out_dict['latitude_e'][1] - lat_del_2
                out_dict['latitude_e'][-1] = \
                        out_dict['latitude_e'][-2] + lat_del_2
                lat_del_half = lat_del_2 * 0.5
                out_dict['latitude_e'] = np.insert(
                        out_dict['latitude_e'], 0,
                        out_dict['latitude_e'][0] - lat_del_half)
                out_dict['latitude_e'] = np.insert(
                        out_dict['latitude_e'],
                        len(out_dict['latitude_e']),
                        out_dict['latitude_e'][-1] + lat_del_half)

            # longitude edges: midpoints plus one spacing at each end
            # (longitude spacing assumed uniform)
            out_dict['longitude_e'] = \
                    ( data_3['lon'][:-1] + data_3['lon'][1:] ) * 0.5
            lon_del = out_dict['longitude_e'][1] - \
                    out_dict['longitude_e'][0]
            out_dict['longitude_e'] = np.insert(
                    out_dict['longitude_e'], 0,
                    out_dict['longitude_e'][0] - lon_del)
            out_dict['longitude_e'] = np.insert(
                    out_dict['longitude_e'],
                    len(out_dict['longitude_e']),
                    out_dict['longitude_e'][-1] + lon_del)

    # conver date_time to TAI93 (seconds since 1993-01-01; plain
    # timedelta arithmetic, leap seconds are not accounted for)
    if time:
        out_dict['date_time'] = date_time
        out_dict['TAI93'] = []
        time93 = datetime.datetime.strptime('1993-01-01', '%Y-%m-%d')
        for i in range(len(date_time)):
            diff = date_time[i] - time93
            day_hours = 24.0
            hour_seconds = 3600.0
            out_dict['TAI93'].append(diff.days * day_hours * \
                    hour_seconds + diff.seconds)
        out_dict['TAI93'] = np.array(out_dict['TAI93'])

    # merge variables
    for varname in varname_list:

        # concatenate arrays along the time axis
        out_dict[varname] = np.concatenate(out_dict[varname], axis=0)

        if (out_dict[varname].ndim == 4):
            # move axis
            # move lev axis to the last.
            out_dict[varname] = np.moveaxis(out_dict[varname], 1, 3)

    return out_dict
def compare_two_varilables(filename, varname1, varname2,
                           vmin=0.0, vmax=0.8,
                           diff_min=None, diff_max=None,
                           seperate_cbar=False,
                           region_limit=None,
                           sc_xlabel='', sc_ylabel='',
                           sc_max=1.0,
                           verbose=True):
    """ Compare two variables from one netCDF file in a 2x2 figure:
    maps of each variable, a map of their difference
    (varname2 - varname1), and a scatter plot with statistics.

    NOTE: the names 'varilables' and 'seperate_cbar' are typos kept
    for backward compatibility with existing callers.

    Parameters
    ----------
    filename : str
        netCDF file containing 'Latitude_e', 'Longitude_e', and the
        two variables.
    varname1, varname2 : str
        Names of the variables to compare.
    vmin, vmax : float
        Color limits for the two variable maps.
    diff_min, diff_max : float or None
        Color limits for the difference map.
    seperate_cbar : bool
        Passed to cartopy_plot as *cbar*; shared colorbars are always
        added below the panels.
    region_limit : tuple or None
        (lat_min, lon_min, lat_max, lon_max); data are subset and map
        axes limited to this region.
    sc_xlabel, sc_ylabel : str
        Scatter plot axis labels.
    sc_max : float
        Upper limit of both scatter plot axes.
    verbose : bool
        Passed to read_nc.
    """

    # read data
    coord_varns = ['Latitude_e', 'Longitude_e']
    some_varns = [varname1, varname2]
    varnames = coord_varns + some_varns
    data_dict = read_nc(filename, varnames, verbose=verbose)

    # region_limit: subset edges (one extra row/column) and centers
    if region_limit is not None:

        lat_e_1D = data_dict['Latitude_e'][:,0]
        lon_e_1D = data_dict['Longitude_e'][0,:]

        # index
        i1 = get_center_index(lat_e_1D, region_limit[0])
        i2 = get_center_index(lat_e_1D, region_limit[2])
        j1 = get_center_index(lon_e_1D, region_limit[1])
        j2 = get_center_index(lon_e_1D, region_limit[3])

        for varn in varnames:
            if varn in coord_varns:
                # edge arrays carry one more point than cell arrays
                data_dict[varn] = data_dict[varn][i1:i2+2,j1:j2+2]
            if varn in some_varns:
                data_dict[varn] = data_dict[varn][i1:i2+1,j1:j2+1]

    # plot
    fig = plt.figure(figsize=(8,7))
    plt.subplots_adjust(top=0.98)
    ax_list = []
    xtick = [100, 110, 120]
    ytick = [30, 40, 50]
    xtick_list = [xtick, xtick, xtick]
    ytick_list = [ytick, [], ytick]
    for i in range(len(xtick_list)):
        # panels 221, 222, 223 are maps
        ax = add_geoaxes(fig, int('22'+str(i+1)),
                         xtick=xtick_list[i], ytick=ytick_list[i])
        ax_list.append(ax)

    lat_e = data_dict['Latitude_e']
    lon_e = data_dict['Longitude_e']

    # colorbar height
    h = 0.02

    # variables
    for i in range(len(some_varns)):
        varn = some_varns[i]
        var = data_dict[varn]
        ax = ax_list[i]
        var_out = cartopy_plot(lon_e, lat_e, var, ax=ax,
                               vmin=vmin, vmax=vmax,
                               cbar=seperate_cbar,
                               cmap=deepcopy(WhGrYlRd_map))
        add_China_province(ax)

    # shared colorbar for the two variable panels
    ax1 = ax_list[0]
    ax2 = ax_list[1]
    cax_v = h_2_ax(fig, ax1, ax2, y_off=-0.06, height=h)
    plt.colorbar(var_out['mesh'], cax=cax_v,
                 orientation='horizontal')
    right_center_label(cax_v, '[DU]')

    # difference (varname2 - varname1)
    ax = ax_list[2]
    var_diff = data_dict[varname2] - data_dict[varname1]
    diff_out = cartopy_plot(lon_e, lat_e, var_diff, ax=ax,
                            vmin=diff_min, vmax=diff_max,
                            cbar=seperate_cbar,
                            cmap=plt.get_cmap('seismic'))
    add_China_province(ax)

    # colorbar of difference
    ax = ax_list[2]
    cax_diff = h_1_ax(fig, ax, y_off=-0.06, height=h)
    plt.colorbar(diff_out['mesh'], cax=cax_diff,
                 orientation='horizontal')
    right_center_label(cax_diff, '[DU]')

    # scatter plot
    ax_sc = fig.add_subplot(224)
    ax_sc.set_aspect('equal')
    ax_sc.set_xlabel(sc_xlabel)
    ax_sc.set_ylabel(sc_ylabel)
    # BUGFIX: use the *sc_max* parameter for the axis limits; it was
    # accepted but never used, with the limits hard-coded to its
    # default value of 1.0 (so default behavior is unchanged).
    ax_sc.set_xlim([0, sc_max])
    ax_sc.set_ylim([0, sc_max])
    label_ul = ['R', 'linear_eq', 'rmse', 'nmb', 'mb', 'N']
    scatter(ax_sc, data_dict[varname1], data_dict[varname2], s=3,
            label_ul=label_ul)

    # region
    if region_limit is not None:
        for ax in ax_list:
            ax.set_xlim(region_limit[1], region_limit[3])
            ax.set_ylim(region_limit[0], region_limit[2])
'joint_s200_gc_v_so2', 'joint_s200_omi_v_gc_so2' ] # OMI GC no AK no_AK_dir = '../data/sub_OMI_SO2_no_AK/' verbose = True figdir = '../figure/' # # End user parameters ##################### # read GC, OMI data gc_omi_vars = read_nc(gc_omi_file, gc_omi_varname_list, verbose=verbose) print('Latitude_e:') print(gc_omi_vars['Latitude_e'][:, 0]) print('Latitude:') print(gc_omi_vars['Latitude'][:, 0]) print('Longitude_e:') print(gc_omi_vars['Longitude_e'][0, :]) print('Longitude:') print(gc_omi_vars['Longitude'][0, :]) prior_gc_v_so2 = gc_omi_vars['prior_gc_v_so2'] post_gc_v_so2 = gc_omi_vars['post_gc_v_so2'] joint_s200_gc_v_so2 = gc_omi_vars['joint_s200_gc_v_so2'] prior_omi_v_gc_so2 = gc_omi_vars['prior_omi_v_gc_so2'] post_omi_v_gc_so2 = gc_omi_vars['post_omi_v_gc_so2'] joint_s200_omi_v_gc_so2 = gc_omi_vars['joint_s200_omi_v_gc_so2']